diff --git a/MANIFEST.in b/MANIFEST.in index 2bed7744..2a563258 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ include Makefile include requirements.txt include requirements-swh.txt include version.txt -recursive-include swh/web/ui/static * -recursive-include swh/web/ui/templates * +recursive-include swh/web/static * +recursive-include swh/web/api/templates * diff --git a/Makefile.local b/Makefile.local index 6b3324de..f03a989a 100644 --- a/Makefile.local +++ b/Makefile.local @@ -1,13 +1,13 @@ -SWH_WEB_UI=./bin/swh-web-ui-dev +SWH_WEB_UI=./bin/swh-web-dev FLAG=-v NOSEFLAGS=-v -s TOOL=pandoc run-dev: $(SWH_WEB_UI) $(FLAG) --config ./resources/test/webapp.yml run: # works with the default ~/.config/swh/webapp.yml file $(SWH_WEB_UI) $(FLAG) doc: - cd swh/web/ui/templates/includes/ && pandoc -o apidoc-header.html apidoc-header.md + cd swh/web/api/templates/includes/ && pandoc -o apidoc-header.html apidoc-header.md diff --git a/README b/README index 49534b4a..bfe45627 100644 --- a/README +++ b/README @@ -1,4 +1,4 @@ -swh-web-ui -========== +swh-web-api +=========== SWH's API + web front-end diff --git a/README-dev.md b/README-dev.md index db7d0cb3..8fcd4291 100644 --- a/README-dev.md +++ b/README-dev.md @@ -1,54 +1,59 @@ README-dev ========== -# modules' description +# Run server + +Either use the django manage script directly (useful in development mode as it offers various commands). +The configuration will be taken from the default configuration file: '~/.config/swh/webapp.yml'. +``` +python3 -m swh.web.manage runserver +``` -## Main +or use the following command: -swh.web.ui.main: Start the server or the dev server +``` +./bin/swh-web-dev --config +``` + +# modules' description ## Layers -Folder swh/web/ui/: +Folder swh/web/api/: -- api main api endpoints definition (api) -- views main ui endpoints (web app) -- service Orchestration layer used by api/view module. 
+- views main api endpoints definitions (html browsable + json + yaml) +- service Orchestration layer used by views module. In charge of communication with `backend` to retrieve information and conversion for the upper layer. - backend Lower layer in charge of communication with swh storage. Used by `service` module. In short: -1. views -depends-> api -depends-> service -depends-> backend ----asks----> swh-storage -2. views <- api <- service <- backend <----rets---- swh-storage +1. views -depends-> service -depends-> backend ----asks----> swh-storage +2. views <- service <- backend <----rets---- swh-storage ## Utilities -Folder swh/web/ui/: +Folder swh/web/api/: - apidoc Browsable api functions. -- exc Exception definitions. -- errorhandler Exception (defined in `exc`) handlers. - Use at route definition time (`api`, `views`). -- renderers Rendering utilities (html, json, yaml, data...). - Use at route definition time (`api`, `views`). +- apiresponse Api response utility functions +- apiurls Api routes registration functions +- exc Exception definitions - converters conversion layer to transform swh data to serializable data. Used by `service` to convert data before transmitting to `api` or `views`. - query Utilities to parse data from http endpoints. Used by `service` -- upload Utility module to deal with upload of data to webapp or api. - Used by `api` -- utils Utilities used throughout swh-web-ui. +- utils Utilities used throughout swh-web-api. ### About apidoc This is a 'decorator tower' that stores the data associated with the documentation upon loading the apidoc module. The top decorator of any tower should be @apidoc.route(). Apidoc raises an exception if this decorator is missing, and flask raises an exception if it is present but not at the top of the tower. 
## Graphics summary ![Summary dependencies](./docs/dependencies.png) diff --git a/bin/swh-web-ui-dev b/bin/swh-web-dev similarity index 57% rename from bin/swh-web-ui-dev rename to bin/swh-web-dev index 5622bf5c..0a6bfe07 100755 --- a/bin/swh-web-ui-dev +++ b/bin/swh-web-dev @@ -1,35 +1,45 @@ #!/usr/bin/env python3 # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import argparse +import django +import os -from swh.web.ui import main +from django.core import management +from django.core.management.commands.runserver import ( + Command as runserver +) + +from swh.web import config # Default configuration file -DEFAULT_CONF_FILE = '~/.config/swh/webapp.ini' +DEFAULT_CONF_FILE = '~/.config/swh/webapp.yml' def parse_args(): """Parse the configuration for the cli. """ cli = argparse.ArgumentParser(description="SWH's web ui.") - cli.add_argument('--verbose', '-v', action='store_true', - help='Verbosity level in log file.') + cli.add_argument('--config', '-c', help='configuration file path') args = cli.parse_args() return args if __name__ == '__main__': args = parse_args() config_path = args.config or DEFAULT_CONF_FILE - - main.run_debug_from(config_path, args.verbose) + swh_web_config = config.get_config(config_path) + runserver.default_port = swh_web_config['port'] + runserver.default_addr = swh_web_config['host'] + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swh.web.settings") + django.setup() + management.call_command('runserver') diff --git a/docs/conf.py b/docs/conf.py index 190deb7e..2be220da 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1 +1,7 @@ +import os +import django + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swh.web.settings") +django.setup() + from swh.docs.sphinx.conf import * # NoQA diff --git a/requirements.txt b/requirements.txt index 
22d5ae2e..ed1eb3c2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,14 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names. For the full spec or # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html # Runtime dependencies -Flask -Flask_Limiter +django +django_rest_framework +django_extensions python-dateutil docutils pygments -redis -hiredis # Test dependencies -#Flask-Testing -#blinker + diff --git a/resources/test/webapp.yml b/resources/test/webapp.yml index 6aa296aa..388b7c9b 100644 --- a/resources/test/webapp.yml +++ b/resources/test/webapp.yml @@ -1,29 +1,21 @@ storage: cls: remote args: url: http://localhost:5002/ # where to log information log_dir: /tmp/swh/web-ui/log # for dev only debug: true # current server (0.0.0.0 for world opening) host: 127.0.0.1 # its port port: 5004 # Max revisions shown in a log max_log_revs: 25 -limiter: - global_limits: - - '1000 per minute' - headers_enabled: True - strategy: 'moving-window' - storage_uri: 'memory://' - storage_options: {} - in_memory_fallback: - - '1000 per minute' +limiter_rate: '1000/min' diff --git a/swh/web/ui/__init__.py b/swh/web/api/__init__.py similarity index 100% rename from swh/web/ui/__init__.py rename to swh/web/api/__init__.py diff --git a/swh/web/ui/apidoc.py b/swh/web/api/apidoc.py similarity index 73% rename from swh/web/ui/apidoc.py rename to swh/web/api/apidoc.py index b904e6e7..4f4e551d 100644 --- a/swh/web/ui/apidoc.py +++ b/swh/web/api/apidoc.py @@ -1,350 +1,305 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import re from collections import defaultdict from functools import wraps from enum import Enum -from flask import render_template, url_for -from flask import g 
+from django.urls import reverse +from rest_framework.decorators import api_view -from swh.web.ui.main import app +from swh.web.api.apiurls import APIUrls +from swh.web.api.apiresponse import make_api_response, error_response class argtypes(Enum): # noqa: N801 """Class for centralizing argument type descriptions """ ts = 'timestamp' int = 'integer' str = 'string' path = 'path' sha1 = 'sha1' uuid = 'uuid' sha1_git = 'sha1_git' algo_and_hash = 'hash_type:hash' class rettypes(Enum): # noqa: N801 """Class for centralizing return type descriptions """ octet_stream = 'octet stream' list = 'list' dict = 'dict' class excs(Enum): # noqa: N801 """Class for centralizing exception type descriptions """ badinput = 'BadInputExc' notfound = 'NotFoundExc' -class APIUrls(object): - """ - Class to manage API documentation URLs. - * Indexes all routes documented using apidoc's decorators. - * Tracks endpoint/request processing method relationships for use - in generating related urls in API documentation - Relies on the load_controllers logic in main.py for initialization. - - """ - apidoc_routes = {} - method_endpoints = {} - - @classmethod - def get_app_endpoints(cls): - return cls.apidoc_routes - - @classmethod - def get_method_endpoints(cls, fname): - if len(cls.method_endpoints) == 0: - cls.method_endpoints = cls.group_routes_by_method() - return cls.method_endpoints[fname] - - @classmethod - def group_routes_by_method(cls): - """ - Group URL endpoints according to their processing method. - Returns: - A dict where keys are the processing method names, and values - are the routes that are bound to the key method. 
- """ - endpoints = {} - for rule in app.url_map.iter_rules(): - rule_dict = {'rule': rule.rule, - 'methods': rule.methods} - if rule.endpoint not in endpoints: - endpoints[rule.endpoint] = [rule_dict] - else: - endpoints[rule.endpoint].append(rule_dict) - return endpoints - - @classmethod - def index_add_route(cls, route, docstring, **kwargs): - """ - Add a route to the self-documenting API reference - """ - if route not in cls.apidoc_routes: - d = {'docstring': docstring} - for k, v in kwargs.items(): - d[k] = v - cls.apidoc_routes[route] = d - - class APIDocException(Exception): """ Custom exception to signal errors in the use of the APIDoc decorators """ class route(object): # noqa: N801 """Decorate an API method to register it in the API doc route index and create the corresponding Flask route. This decorator is responsible for bootstrapping the linking of subsequent decorators, as well as traversing the decorator stack to obtain the documentation data from it. Args: route: documentation page's route noargs: set to True if the route has no arguments, and its result should be displayed anytime its documentation is requested. Default to False hidden: set to True to remove the endpoint from being listed in the /api endpoints. Default to False. tags: Further information on api endpoints. 
Two values are possibly expected: - hidden: remove the entry points from the listing - upcoming: display the entry point but it is not followable """ - def __init__(self, route, noargs=False, tags=[]): + def __init__(self, route, noargs=False, tags=[], handle_response=False, + api_version='1'): super().__init__() self.route = route + self.urlpattern = '^' + api_version + route + '$' self.noargs = noargs self.tags = set(tags) + self.handle_response = handle_response # @apidoc.route() Decorator call def __call__(self, f): # If the route is not hidden, add it to the index if 'hidden' not in self.tags: APIUrls.index_add_route(self.route, f.__doc__, tags=self.tags) # If the decorated route has arguments, we create a specific # documentation view if not self.noargs: - doc_route_name = 'doc_for_%s' % f.__name__ - def doc_view(): + @api_view() + def doc_view(request): doc_data = self.get_doc_data(f) - return app.response_class( - render_template('apidoc.html', **doc_data), - content_type='text/html') + return make_api_response(request, None, doc_data) - app.add_url_rule(self.route, doc_route_name, doc_view, - methods=['GET']) + view_name = self.route[1:-1].replace('/', '-') + APIUrls.index_add_url_pattern(self.urlpattern, doc_view, view_name) - # The documented view only adds the documentation data to the global - # context @wraps(f) - def documented_view(*args, **kwargs): - g.doc_env = self.get_doc_data(f) # Store for response processing - rv = f(*args, **kwargs) - return rv + def documented_view(request, **kwargs): + doc_data = self.get_doc_data(f) + + try: + rv = f(request, **kwargs) + except Exception as exc: + return error_response(request, exc, doc_data) + + if self.handle_response: + return rv + else: + return make_api_response(request, rv, doc_data) return documented_view def filter_api_url(self, endpoint, route_re, noargs): doc_methods = {'GET', 'HEAD', 'OPTIONS'} if re.match(route_re, endpoint['rule']): if endpoint['methods'] == doc_methods and not noargs: return 
False return True - def build_examples(self, f, urls, args): + def build_examples(self, urls, args): """Build example documentation. Args: f: function urls: information relative to url for that function args: information relative to arguments for that function Yields: example based on default parameter value if any """ s = set() r = [] for data_url in urls: url = data_url['rule'] defaults = {arg['name']: arg['default'] for arg in args if arg['name'] in url} - if defaults: - url = url_for(f.__name__, **defaults) + if defaults and None not in defaults.values(): + url = reverse(data_url['name'], kwargs=defaults) if url in s: continue s.add(url) r.append(url) return r def get_doc_data(self, f): """Build documentation data for the decorated function""" data = { 'route': self.route, 'noargs': self.noargs, } data.update(getattr(f, 'doc_data', {})) if not f.__doc__: raise APIDocException('Apidoc %s: expected a docstring' ' for function %s' % (self.__class__.__name__, f.__name__)) data['docstring'] = f.__doc__ route_re = re.compile('.*%s$' % data['route']) - endpoint_list = APIUrls.get_method_endpoints(f.__name__) + endpoint_list = APIUrls.get_method_endpoints(f) data['urls'] = [url for url in endpoint_list if self.filter_api_url(url, route_re, data['noargs'])] if 'args' in data: - data['examples'] = self.build_examples( - f, data['urls'], data['args']) + data['examples'] = self.build_examples(data['urls'], data['args']) data['heading'] = '%s Documentation' % data['route'] return data class DocData(object): """Base description of optional input/output setup for a route. """ destination = None def __init__(self): self.doc_data = {} def __call__(self, f): if not hasattr(f, 'doc_data'): f.doc_data = defaultdict(list) f.doc_data[self.destination].append(self.doc_data) return f class arg(DocData): # noqa: N801 """ Decorate an API method to display an argument's information on the doc page specified by @route above. Args: name: the argument's name. 
MUST match the method argument's name to create the example request URL. default: the argument's default value argtype: the argument's type as an Enum value from apidoc.argtypes argdoc: the argument's documentation string """ destination = 'args' def __init__(self, name, default, argtype, argdoc): super().__init__() self.doc_data = { 'name': name, 'type': argtype.value, 'doc': argdoc, 'default': default } class header(DocData): # noqa: N801 """ Decorate an API method to display header information the api can potentially return in the response. Args: name: the header name doc: the information about that header """ destination = 'headers' def __init__(self, name, doc): super().__init__() self.doc_data = { 'name': name, 'doc': doc, } class param(DocData): # noqa: N801 """Decorate an API method to display query parameter information the api can potentially accept. Args: name: parameter's name default: parameter's default value argtype: parameter's type as an Enum value from apidoc.argtypes doc: the information about that header """ destination = 'params' def __init__(self, name, default, argtype, doc): super().__init__() self.doc_data = { 'name': name, 'type': argtype.value, 'default': default, 'doc': doc, } class raises(DocData): # noqa: N801 """Decorate an API method to display information pertaining to an exception that can be raised by this method. Args: exc: the exception name as an Enum value from apidoc.excs doc: the exception's documentation string """ destination = 'excs' def __init__(self, exc, doc): super().__init__() self.doc_data = { 'exc': exc.value, 'doc': doc } class returns(DocData): # noqa: N801 """Decorate an API method to display information about its return value. 
Args: rettype: the return value's type as an Enum value from apidoc.rettypes retdoc: the return value's documentation string """ destination = 'returns' def __init__(self, rettype=None, retdoc=None): super().__init__() self.doc_data = { 'type': rettype.value, 'doc': retdoc } diff --git a/swh/web/api/apiresponse.py b/swh/web/api/apiresponse.py new file mode 100644 index 00000000..4e0f7af2 --- /dev/null +++ b/swh/web/api/apiresponse.py @@ -0,0 +1,173 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json + +from rest_framework.response import Response + +from swh.storage.exc import StorageDBError, StorageAPIError + +from swh.web.api import utils +from swh.web.api.exc import NotFoundExc, ForbiddenExc + + +def compute_link_header(rv, options): + """Add Link header in returned value results. + + Expects rv to be a dict with 'results' and 'headers' key: + 'results': the returned value expected to be shown + 'headers': dictionary with link-next and link-prev + + Args: + rv (dict): with keys: + - 'headers': potential headers with 'link-next' + and 'link-prev' keys + - 'results': containing the result to return + options (dict): the initial dict to update with result if any + + Returns: + Dict with optional keys 'link-next' and 'link-prev'. 
+ + """ + link_headers = [] + + if 'headers' not in rv: + return {} + + rv_headers = rv['headers'] + + if 'link-next' in rv_headers: + link_headers.append('<%s>; rel="next"' % ( + rv_headers['link-next'])) + if 'link-prev' in rv_headers: + link_headers.append('<%s>; rel="previous"' % ( + rv_headers['link-prev'])) + + if link_headers: + link_header_str = ','.join(link_headers) + headers = options.get('headers', {}) + headers.update({ + 'Link': link_header_str + }) + return headers + + return {} + + +def filter_by_fields(request, data): + """Extract a request parameter 'fields' if it exists to permit the + filtering on the data dict's keys. + + If such field is not provided, returns the data as is. + + """ + fields = request.query_params.get('fields') + if fields: + fields = set(fields.split(',')) + data = utils.filter_field_keys(data, fields) + + return data + + +def transform(rv): + """Transform an eventual returned value with multiple layer of + information with only what's necessary. + + If the returned value rv contains the 'results' key, this is the + associated value which is returned. + + Otherwise, return the initial dict without the potential 'headers' + key. + + """ + if 'results' in rv: + return rv['results'] + + if 'headers' in rv: + rv.pop('headers') + + return rv + + +def make_api_response(request, data, doc_data={}, options={}): + """Generates an API response based on the requested mimetype. 
+ + Args: + request: a DRF Request object + data: raw data to return in the API response + doc_data: documentation data for HTML response + options: optionnal data that can be used to generate the response + + Returns: + a DRF Response a object + + """ + if data: + options['headers'] = compute_link_header(data, options) + data = transform(data) + data = filter_by_fields(request, data) + doc_env = doc_data + headers = {} + if 'headers' in options: + doc_env['headers_data'] = options['headers'] + headers = options['headers'] + + # get request status code + doc_env['status_code'] = options.get('status', 200) + + response_args = {'status': doc_env['status_code'], + 'headers': headers, + 'content_type': request.accepted_media_type} + + # when requesting HTML, typically when browsing the API through its + # documented views, we need to enrich the input data with documentation + # related ones and inform DRF that we request HTML template rendering + if request.accepted_media_type == 'text/html': + + if data: + data = json.dumps(data, sort_keys=True, + indent=4, + separators=(',', ': ')) + doc_env['response_data'] = data + doc_env['request'] = request + doc_env['heading'] = utils.shorten_path(str(request.path)) + + response_args['data'] = doc_env + response_args['template_name'] = 'apidoc.html' + + # otherwise simply return the raw data and let DRF picks + # the correct renderer (JSON or YAML) + else: + response_args['data'] = data + + return Response(**response_args) + + +def error_response(request, error, doc_data): + """Private function to create a custom error response. 
+ + Args: + request: a DRF Request object + error: the exception that caused the error + doc_data: documentation data for HTML response + """ + error_code = 400 + if isinstance(error, NotFoundExc): + error_code = 404 + elif isinstance(error, ForbiddenExc): + error_code = 403 + elif isinstance(error, StorageDBError): + error_code = 503 + elif isinstance(error, StorageAPIError): + error_code = 503 + + error_opts = {'status': error_code} + error_data = { + 'exception': error.__class__.__name__, + 'reason': str(error), + } + + return make_api_response(request, error_data, doc_data, + options=error_opts) diff --git a/swh/web/api/apiurls.py b/swh/web/api/apiurls.py new file mode 100644 index 00000000..734c7921 --- /dev/null +++ b/swh/web/api/apiurls.py @@ -0,0 +1,124 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import re + +from django.conf.urls import url +from rest_framework.decorators import api_view + + +class APIUrls(object): + """ + Class to manage API documentation URLs. + * Indexes all routes documented using apidoc's decorators. + * Tracks endpoint/request processing method relationships for use + in generating related urls in API documentation + """ + apidoc_routes = {} + method_endpoints = {} + urlpatterns = [] + + @classmethod + def get_app_endpoints(cls): + return cls.apidoc_routes + + @classmethod + def get_method_endpoints(cls, f): + if f.__name__ not in cls.method_endpoints: + cls.method_endpoints[f.__name__] = cls.group_routes_by_method(f) + return cls.method_endpoints[f.__name__] + + @classmethod + def group_routes_by_method(cls, f): + """ + Group URL endpoints according to their processing method. + Returns: + A dict where keys are the processing method names, and values + are the routes that are bound to the key method. 
+ """ + rules = [] + for urlp in cls.urlpatterns: + endpoint = urlp.callback.__name__ + if endpoint != f.__name__: + continue + method_names = urlp.callback.view_class.http_method_names + url_rule = urlp.regex.pattern.replace('^', '/').replace('$', '') + url_rule_params = re.findall('\([^)]+\)', url_rule) + for param in url_rule_params: + param_name = re.findall('<(.*)>', param) + param_name = param_name[0] if len(param_name) > 0 else None + if param_name and hasattr(f, 'doc_data'): + param_index = \ + next(i for (i, d) in enumerate(f.doc_data['args']) + if d['name'] == param_name) + if param_index is not None: + url_rule = url_rule.replace( + param, '<' + + f.doc_data['args'][param_index]['name'] + + ': ' + f.doc_data['args'][param_index]['type'] + + '>') + rule_dict = {'rule': '/api' + url_rule, + 'name': urlp.name, + 'methods': {method.upper() for method in method_names} + } + rules.append(rule_dict) + + return rules + + @classmethod + def index_add_route(cls, route, docstring, **kwargs): + """ + Add a route to the self-documenting API reference + """ + route_view_name = route[1:-1].replace('/', '-') + if route not in cls.apidoc_routes: + d = {'docstring': docstring, + 'route_view_name': route_view_name} + for k, v in kwargs.items(): + d[k] = v + cls.apidoc_routes[route] = d + + @classmethod + def index_add_url_pattern(cls, url_pattern, view, view_name): + cls.urlpatterns.append(url(url_pattern, view, name=view_name)) + + @classmethod + def get_url_patterns(cls): + return cls.urlpatterns + + +class api_route(object): # noqa: N801 + """ + Decorator to ease the registration of an API endpoint + using the Django REST Framework. 
+ + Args: + url_pattern: the url pattern used by DRF to identify the API route + view_name: the name of the API view associated to the route + used to reverse the url + methods: array of HTTP methods supported by the API route + + """ + + def __init__(self, url_pattern=None, view_name=None, + methods=['GET', 'HEAD'], api_version='1'): + super().__init__() + self.url_pattern = '^' + api_version + url_pattern + '$' + self.view_name = view_name + self.methods = methods + + def __call__(self, f): + + # create a DRF view from the wrapped function + @api_view(self.methods) + def api_view_f(*args, **kwargs): + return f(*args, **kwargs) + # small hack for correctly generating API endpoints index doc + api_view_f.__name__ = f.__name__ + + # register the route and its view in the endpoints index + APIUrls.index_add_url_pattern(self.url_pattern, api_view_f, + self.view_name) + return f diff --git a/swh/web/ui/backend.py b/swh/web/api/backend.py similarity index 79% rename from swh/web/ui/backend.py rename to swh/web/api/backend.py index 023a84bd..fbafc7e0 100644 --- a/swh/web/ui/backend.py +++ b/swh/web/api/backend.py @@ -1,357 +1,357 @@ # Copyright (C) 2015-2016 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import os -from . import main +from swh.web import config MAX_LIMIT = 50 # Top limit the users can ask for def content_get(sha1_bin): """Lookup the content designed by {algo: hash_bin}. Args: sha1_bin: content's binary sha1. Returns: Content as dict with 'sha1' and 'data' keys. data representing its raw data. 
""" - contents = main.storage().content_get([sha1_bin]) + contents = config.storage().content_get([sha1_bin]) if contents and len(contents) >= 1: return contents[0] return None def content_find(algo, hash_bin): """Retrieve the content with binary hash hash_bin Args: algo: nature of the hash hash_bin. hash_bin: content's hash searched for. Returns: A dict with keys sha1, sha1_git, sha256, ... if the content exist or None otherwise. """ - return main.storage().content_find({algo: hash_bin}) + return config.storage().content_find({algo: hash_bin}) def content_find_provenance(algo, hash_bin): """Find the content's provenance information. Args: algo: nature of the hash hash_bin. hash_bin: content's hash corresponding to algo searched for. Yields: Yields the list of provenance information for that content if any (this can be empty if the cache is not populated) """ - return main.storage().content_find_provenance({algo: hash_bin}) + return config.storage().content_find_provenance({algo: hash_bin}) def content_ctags_get(id): """Retrieve the list of ctags symbols for a specific sha1. Args: id (bytes): content's hash identifier Returns: dict of keys 'id' (bytes) and 'ctags' (list of dict) """ - return list(main.storage().content_ctags_get([id])) + return list(config.storage().content_ctags_get([id])) def content_ctags_search(expression, last_sha1, limit=10): """Lookup the content designed by {algo: hash_bin}. Args: expression (str): Expression to lookup in indexed raw content last_sha1 (str): Last hash limit (int): Number of elements per page Returns: sha1 whose indexed content match the expression """ limit = min(limit, MAX_LIMIT) - return main.storage().content_ctags_search(expression, - last_sha1=last_sha1, - limit=limit) + return config.storage().content_ctags_search(expression, + last_sha1=last_sha1, + limit=limit) def content_filetype_get(id): """Retrieve content's filetype information. 
""" - r = list(main.storage().content_mimetype_get([id])) + r = list(config.storage().content_mimetype_get([id])) if not r: return None return r[0] def content_language_get(id): """Retrieve content's language information. """ - r = list(main.storage().content_language_get([id])) + r = list(config.storage().content_language_get([id])) if not r: return None return r[0] def content_license_get(id): """Retrieve content's license information. """ - r = list(main.storage().content_fossology_license_get([id])) + r = list(config.storage().content_fossology_license_get([id])) if not r: return None return r[0] def content_missing_per_sha1(sha1list): """List content missing from storage based on sha1 Args: sha1s: Iterable of sha1 to check for absence Returns: an iterable of sha1s missing from the storage """ - return main.storage().content_missing_per_sha1(sha1list) + return config.storage().content_missing_per_sha1(sha1list) def directory_get(sha1_bin): """Retrieve information on one directory. Args: sha1_bin: Directory's identifier Returns: The directory's information. """ - res = main.storage().directory_get([sha1_bin]) + res = config.storage().directory_get([sha1_bin]) if res and len(res) >= 1: return res[0] def origin_get(origin): """Return information about the origin matching dict origin. Args: origin: origin's dict with keys either 'id' or ('type' AND 'url') Returns: Origin information as dict. """ - return main.storage().origin_get(origin) + return config.storage().origin_get(origin) def person_get(person_id): """Return information about the person with id person_id. Args: person_id: person's identifier. Returns: Person information as dict. """ - res = main.storage().person_get([person_id]) + res = config.storage().person_get([person_id]) if res and len(res) >= 1: return res[0] def directory_ls(sha1_git_bin, recursive=False): """Return information about the directory with id sha1_git. Args: sha1_git: directory's identifier. 
recursive: Optional recursive flag default to False Returns: Directory information as dict. """ - directory_entries = main.storage().directory_ls(sha1_git_bin, recursive) + directory_entries = config.storage().directory_ls(sha1_git_bin, recursive) if not directory_entries: return [] return directory_entries def release_get(sha1_git_bin): """Return information about the release with sha1 sha1_git_bin. Args: sha1_git_bin: The release's sha1 as bytes. Returns: Release information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ - res = main.storage().release_get([sha1_git_bin]) + res = config.storage().release_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_get(sha1_git_bin): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. Returns: Revision information as dict if found, None otherwise. Raises: ValueError if the identifier provided is not of sha1 nature. """ - res = main.storage().revision_get([sha1_git_bin]) + res = config.storage().revision_get([sha1_git_bin]) if res and len(res) >= 1: return res[0] return None def revision_get_multiple(sha1_git_bin_list): """Return information about the revisions in sha1_git_bin_list Args: sha1_git_bin_list: The revisions' sha1s as a list of bytes. Returns: Revisions' information as an iterable of dicts if any found, an empty list otherwise Raises: ValueError if the identifier provided is not of sha1 nature. """ - res = main.storage().revision_get(sha1_git_bin_list) + res = config.storage().revision_get(sha1_git_bin_list) if res and len(res) >= 1: return res return [] def revision_log(sha1_git_bin, limit): """Return information about the revision with sha1 sha1_git_bin. Args: sha1_git_bin: The revision's sha1 as bytes. limit: the maximum number of revisions returned. Returns: Revision information as dict if found, None otherwise. 
Raises: ValueError if the identifier provided is not of sha1 nature. """ - return main.storage().revision_log([sha1_git_bin], limit) + return config.storage().revision_log([sha1_git_bin], limit) def revision_log_by(origin_id, branch_name, ts, limit): """Return information about the revision matching the timestamp ts, from origin origin_id, in branch branch_name. Args: origin_id: origin of the revision - branch_name: revision's branch. - timestamp: revision's time frame. Returns: Information for the revision matching the criterions. """ - return main.storage().revision_log_by(origin_id, - branch_name, - ts, - limit=limit) + return config.storage().revision_log_by(origin_id, + branch_name, + ts, + limit=limit) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ - return main.storage().stat_counters() + return config.storage().stat_counters() def lookup_origin_visits(origin_id, last_visit=None, limit=10): """Yields the origin origin_ids' visits. Args: origin_id (int): origin to list visits for last_visit (int): last visit to lookup from limit (int): Number of elements max to display Yields: Dictionaries of origin_visit for that origin """ limit = min(limit, MAX_LIMIT) - yield from main.storage().origin_visit_get( + yield from config.storage().origin_visit_get( origin_id, last_visit=last_visit, limit=limit) def lookup_origin_visit(origin_id, visit_id): """Return information about visit visit_id with origin origin_id. Args: origin_id: origin concerned by the visit visit_id: the visit identifier to lookup Yields: The dict origin_visit concerned """ - return main.storage().origin_visit_get_by(origin_id, visit_id) + return config.storage().origin_visit_get_by(origin_id, visit_id) def revision_get_by(origin_id, branch_name, timestamp): """Return occurrence information matching the criterions origin_id, branch_name, ts. 
""" - res = main.storage().revision_get_by(origin_id, - branch_name, - timestamp=timestamp, - limit=1) + res = config.storage().revision_get_by(origin_id, + branch_name, + timestamp=timestamp, + limit=1) if not res: return None return res[0] def directory_entry_get_by_path(directory, path): """Return a directory entry by its path. """ paths = path.strip(os.path.sep).split(os.path.sep) - return main.storage().directory_entry_get_by_path( + return config.storage().directory_entry_get_by_path( directory, list(map(lambda p: p.encode('utf-8'), paths))) def entity_get(uuid): """Retrieve the entity per its uuid. """ - return main.storage().entity_get(uuid) + return config.storage().entity_get(uuid) diff --git a/swh/web/ui/converters.py b/swh/web/api/converters.py similarity index 99% rename from swh/web/ui/converters.py rename to swh/web/api/converters.py index 3f0bd387..5bf18181 100644 --- a/swh/web/ui/converters.py +++ b/swh/web/api/converters.py @@ -1,312 +1,312 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import json from swh.model import hashutil from swh.core.utils import decode_with_escape -from swh.web.ui import utils +from swh.web.api import utils def from_swh(dict_swh, hashess={}, bytess={}, dates={}, blacklist={}, removables_if_empty={}, empty_dict={}, empty_list={}, convert={}, convert_fn=lambda x: x): """Convert from an swh dictionary to something reasonably json serializable. Args: - dict_swh: the origin dictionary needed to be transformed - hashess: list/set of keys representing hashes values (sha1, sha256, sha1_git, etc...) as bytes. 
Those need to be transformed in hexadecimal string - bytess: list/set of keys representing bytes values which needs to be decoded - blacklist: set of keys to filter out from the conversion - convert: set of keys whose associated values need to be converted using convert_fn - convert_fn: the conversion function to apply on the value of key in 'convert' The remaining keys are copied as is in the output. Returns: dictionary equivalent as dict_swh only with its keys `converted`. """ def convert_hashes_bytes(v): """v is supposedly a hash as bytes, returns it converted in hex. """ if isinstance(v, bytes): return hashutil.hash_to_hex(v) return v def convert_bytes(v): """v is supposedly a bytes string, decode as utf-8. FIXME: Improve decoding policy. If not utf-8, break! """ if isinstance(v, bytes): return v.decode('utf-8') return v def convert_date(v): """v is either: - a dict with three keys: - timestamp (dict or integer timestamp) - offset - negative_utc - a datetime We convert it to a human-readable string """ if isinstance(v, datetime.datetime): return v.isoformat() tz = datetime.timezone(datetime.timedelta(minutes=v['offset'])) swh_timestamp = v['timestamp'] if isinstance(swh_timestamp, dict): date = datetime.datetime.fromtimestamp( swh_timestamp['seconds'], tz=tz) else: date = datetime.datetime.fromtimestamp( swh_timestamp, tz=tz) datestr = date.isoformat() if v['offset'] == 0 and v['negative_utc']: # remove the rightmost + and replace it with a - return '-'.join(datestr.rsplit('+', 1)) return datestr if not dict_swh: return dict_swh new_dict = {} for key, value in dict_swh.items(): if key in blacklist or (key in removables_if_empty and not value): continue if key in dates: new_dict[key] = convert_date(value) elif key in convert: new_dict[key] = convert_fn(value) elif isinstance(value, dict): new_dict[key] = from_swh(value, hashess=hashess, bytess=bytess, dates=dates, blacklist=blacklist, removables_if_empty=removables_if_empty, empty_dict=empty_dict, 
empty_list=empty_list, convert=convert, convert_fn=convert_fn) elif key in hashess: new_dict[key] = utils.fmap(convert_hashes_bytes, value) elif key in bytess: try: new_dict[key] = utils.fmap(convert_bytes, value) except UnicodeDecodeError: if 'decoding_failures' not in new_dict: new_dict['decoding_failures'] = [key] else: new_dict['decoding_failures'].append(key) new_dict[key] = utils.fmap(decode_with_escape, value) elif key in empty_dict and not value: new_dict[key] = {} elif key in empty_list and not value: new_dict[key] = [] else: new_dict[key] = value return new_dict def from_provenance(provenance): """Convert from a provenance information to a provenance dictionary. Args: provenance: Dictionary with the following keys: content (sha1_git) : the content's identifier revision (sha1_git) : the revision the content was seen origin (int) : the origin the content was seen visit (int) : the visit it occurred path (bytes) : the path the content was seen at """ return from_swh(provenance, hashess={'content', 'revision'}, bytess={'path'}) def from_origin(origin): """Convert from an SWH origin to an origin dictionary. """ return from_swh(origin, removables_if_empty={'lister', 'project'}) def from_release(release): """Convert from an SWH release to a json serializable release dictionary. 
Args: release: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - revision: identifier of the revision the release points to (sha1 in bytes) - comment: release's comment message (bytes) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) Returns: Release dictionary with the following keys: - id: hexadecimal sha1 (string) - revision: hexadecimal sha1 (string) - comment: release's comment message (string) - name: release's name (string) - author: release's author identifier (swh's id) - synthetic: the synthetic property (boolean) """ return from_swh( release, hashess={'id', 'target'}, bytess={'message', 'name', 'fullname', 'email'}, dates={'date'}, ) class SWHMetadataEncoder(json.JSONEncoder): """Special json encoder for metadata field which can contain bytes encoded value. """ def default(self, obj): if isinstance(obj, bytes): return obj.decode('utf-8') # Let the base class default method raise the TypeError return json.JSONEncoder.default(self, obj) def convert_revision_metadata(metadata): """Convert json specific dict to a json serializable one. """ if not metadata: return {} return json.loads(json.dumps(metadata, cls=SWHMetadataEncoder)) def from_revision(revision): """Convert from an SWH revision to a json serializable revision dictionary. 
Args: revision: Dict with the following keys - id: identifier of the revision (sha1 in bytes) - directory: identifier of the directory the revision points to (sha1 in bytes) - author_name, author_email: author's revision name and email - committer_name, committer_email: committer's revision name and email - message: revision's message - date, date_offset: revision's author date - committer_date, committer_date_offset: revision's commit date - parents: list of parents for such revision - synthetic: revision's property nature - type: revision's type (git, tar or dsc at the moment) - metadata: if the revision is synthetic, this can reference dynamic properties. Returns: Revision dictionary with the same keys as inputs, only: - sha1s are in hexadecimal strings (id, directory) - bytes are decoded in string (author_name, committer_name, author_email, committer_email) - remaining keys are left as is """ revision = from_swh(revision, hashess={'id', 'directory', 'parents', 'children'}, bytess={'name', 'fullname', 'email'}, convert={'metadata'}, convert_fn=convert_revision_metadata, dates={'date', 'committer_date'}) if revision: if 'parents' in revision: revision['merge'] = len(revision['parents']) > 1 if 'message' in revision: try: revision['message'] = revision['message'].decode('utf-8') except UnicodeDecodeError: revision['message_decoding_failed'] = True revision['message'] = None return revision def from_content(content): """Convert swh content to serializable content dictionary. """ return from_swh(content, hashess={'sha1', 'sha1_git', 'sha256', 'blake2s256'}, blacklist={'ctime'}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) def from_person(person): """Convert swh person to serializable person dictionary. """ return from_swh(person, bytess={'name', 'fullname', 'email'}) def from_origin_visit(visit): """Convert swh origin_visit to serializable origin_visit dictionary. 
""" ov = from_swh(visit, hashess={'target'}, bytess={'branch'}, dates={'date'}, empty_dict={'metadata'}) if ov and 'occurrences' in ov: ov['occurrences'] = { decode_with_escape(k): v for k, v in ov['occurrences'].items() } return ov def from_directory_entry(dir_entry): """Convert swh person to serializable person dictionary. """ return from_swh(dir_entry, hashess={'dir_id', 'sha1_git', 'sha1', 'sha256', 'target'}, bytess={'name'}, removables_if_empty={ 'sha1', 'sha1_git', 'sha256', 'status'}, convert={'status'}, convert_fn=lambda v: 'absent' if v == 'hidden' else v) def from_filetype(content_entry): """Convert swh person to serializable person dictionary. """ return from_swh(content_entry, hashess={'id'}, bytess={'mimetype', 'encoding'}) diff --git a/swh/web/ui/exc.py b/swh/web/api/exc.py similarity index 100% rename from swh/web/ui/exc.py rename to swh/web/api/exc.py diff --git a/swh/web/ui/query.py b/swh/web/api/query.py similarity index 98% rename from swh/web/ui/query.py rename to swh/web/api/query.py index 2877cea3..7e95180b 100644 --- a/swh/web/ui/query.py +++ b/swh/web/api/query.py @@ -1,111 +1,111 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import re from uuid import UUID from swh.model.hashutil import ALGORITHMS, hash_to_bytes -from swh.web.ui.exc import BadInputExc +from swh.web.api.exc import BadInputExc SHA256_RE = re.compile(r'^[0-9a-f]{64}$', re.IGNORECASE) SHA1_RE = re.compile(r'^[0-9a-f]{40}$', re.IGNORECASE) def parse_hash(q): """Detect the hash type of a user submitted query string. 
Args: query string with the following format: "[HASH_TYPE:]HEX_CHECKSUM", where HASH_TYPE is optional, defaults to "sha1", and can be one of swh.model.hashutil.ALGORITHMS Returns: A pair (hash_algorithm, byte hash value) Raises: ValueError if the given query string does not correspond to a valid hash value """ def guess_algo(q): if SHA1_RE.match(q): return 'sha1' elif SHA256_RE.match(q): return 'sha256' else: raise BadInputExc('Invalid checksum query string %s' % q) def check_algo(algo, hex): if (algo in {'sha1', 'sha1_git'} and not SHA1_RE.match(hex)) \ or (algo == 'sha256' and not SHA256_RE.match(hex)): raise BadInputExc('Invalid hash %s for algorithm %s' % (hex, algo)) parts = q.split(':') if len(parts) > 2: raise BadInputExc('Invalid checksum query string %s' % q) elif len(parts) == 1: parts = (guess_algo(q), q) elif len(parts) == 2: check_algo(parts[0], parts[1]) algo = parts[0] if algo not in ALGORITHMS: raise BadInputExc('Unknown hash algorithm %s' % algo) return (algo, hash_to_bytes(parts[1])) def parse_hash_with_algorithms_or_throws(q, accepted_algo, error_msg): """Parse a query but only accepts accepted_algo. Otherwise, raise the exception with message error_msg. Args: - q: query string with the following format: "[HASH_TYPE:]HEX_CHECKSUM" where HASH_TYPE is optional, defaults to "sha1", and can be one of swh.model.hashutil.ALGORITHMS. - accepted_algo: array of strings representing the names of accepted algorithms. - error_msg: error message to raise as BadInputExc if the algo of the query does not match. Returns: A pair (hash_algorithm, byte hash value) Raises: BadInputExc when the inputs is invalid or does not validate the accepted algorithms. """ algo, hash = parse_hash(q) if algo not in accepted_algo: raise BadInputExc(error_msg) return (algo, hash) def parse_uuid4(uuid): """Parse an uuid 4 from a string. Args: uuid: String representing an uuid. Returns: The uuid as is if everything is ok. Raises: BadInputExc: if the uuid is invalid. 
""" try: UUID(uuid, version=4) except ValueError as e: # not a valid hex code for a UUID raise BadInputExc(str(e)) return uuid diff --git a/swh/web/ui/service.py b/swh/web/api/service.py similarity index 99% rename from swh/web/ui/service.py rename to swh/web/api/service.py index cd3e1b58..7ac61981 100644 --- a/swh/web/ui/service.py +++ b/swh/web/api/service.py @@ -1,769 +1,771 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import defaultdict from swh.model import hashutil -from swh.web.ui import converters, query, backend -from swh.web.ui.exc import NotFoundExc + +from swh.web.api import converters +from swh.web.api import query, backend +from swh.web.api.exc import NotFoundExc def lookup_multiple_hashes(hashes): """Lookup the passed hashes in a single DB connection, using batch processing. Args: An array of {filename: X, sha1: Y}, string X, hex sha1 string Y. Returns: The same array with elements updated with elem['found'] = true if the hash is present in storage, elem['found'] = false if not. """ hashlist = [hashutil.hash_to_bytes(elem['sha1']) for elem in hashes] content_missing = backend.content_missing_per_sha1(hashlist) missing = [hashutil.hash_to_hex(x) for x in content_missing] for x in hashes: x.update({'found': True}) for h in hashes: if h['sha1'] in missing: h['found'] = False return hashes def lookup_expression(expression, last_sha1, per_page): """Lookup expression in raw content. 
Args: expression (str): An expression to lookup through raw indexed content last_sha1 (str): Last sha1 seen per_page (int): Number of results per page Returns: List of ctags whose content match the expression """ for ctag in backend.content_ctags_search(expression, last_sha1, per_page): ctag = converters.from_swh(ctag, hashess={'id'}) ctag['sha1'] = ctag['id'] ctag.pop('id') yield ctag def lookup_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found containing the hash info if the hash is present, None if not. """ algo, hash = query.parse_hash(q) found = backend.content_find(algo, hash) return {'found': found, 'algo': algo} def search_hash(q): """Checks if the storage contains a given content checksum Args: query string of the form Returns: Dict with key found to True or False, according to whether the checksum is present or not """ algo, hash = query.parse_hash(q) found = backend.content_find(algo, hash) return {'found': found is not None} def lookup_content_provenance(q): """Return provenance information from a specified content. Args: q: query string of the form Yields: provenance information (dict) list if the content is found. """ algo, hash = query.parse_hash(q) provenances = backend.content_find_provenance(algo, hash) if not provenances: return None return (converters.from_provenance(p) for p in provenances) def _lookup_content_sha1(q): """Given a possible input, query for the content's sha1. Args: q: query string of the form Returns: binary sha1 if found or None """ algo, hash = query.parse_hash(q) if algo != 'sha1': hashes = backend.content_find(algo, hash) if not hashes: return None return hashes['sha1'] return hash def lookup_content_ctags(q): """Return ctags information from a specified content. Args: q: query string of the form Yields: ctags information (dict) list if the content is found. 
""" sha1 = _lookup_content_sha1(q) if not sha1: return None ctags = backend.content_ctags_get(sha1) if not ctags: return None for ctag in ctags: yield converters.from_swh(ctag, hashess={'id'}) def lookup_content_filetype(q): """Return filetype information from a specified content. Args: q: query string of the form Yields: filetype information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None filetype = backend.content_filetype_get(sha1) if not filetype: return None return converters.from_filetype(filetype) def lookup_content_language(q): """Return language information from a specified content. Args: q: query string of the form Yields: language information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None lang = backend.content_language_get(sha1) if not lang: return None return converters.from_swh(lang, hashess={'id'}) def lookup_content_license(q): """Return license information from a specified content. Args: q: query string of the form Yields: license information (dict) list if the content is found. """ sha1 = _lookup_content_sha1(q) if not sha1: return None lang = backend.content_license_get(sha1) if not lang: return None return converters.from_swh(lang, hashess={'id'}) def lookup_origin(origin): """Return information about the origin matching dict origin. Args: origin: origin's dict with keys either 'id' or ('type' AND 'url') Returns: origin information as dict. """ return converters.from_origin(backend.origin_get(origin)) def lookup_person(person_id): """Return information about the person with id person_id. Args: person_id as string Returns: person information as dict. """ person = backend.person_get(person_id) return converters.from_person(person) def lookup_directory(sha1_git): """Return information about the directory with id sha1_git. Args: sha1_git as string Returns: directory information as dict. 
""" _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], # HACK: sha1_git really 'Only sha1_git is supported.') dir = backend.directory_get(sha1_git_bin) if not dir: return None directory_entries = backend.directory_ls(sha1_git_bin) return map(converters.from_directory_entry, directory_entries) def lookup_directory_with_path(directory_sha1_git, path_string): """Return directory information for entry with path path_string w.r.t. root directory pointed by directory_sha1_git Args: - directory_sha1_git: sha1_git corresponding to the directory to which we append paths to (hopefully) find the entry - the relative path to the entry starting from the directory pointed by directory_sha1_git Raises: NotFoundExc if the directory entry is not found """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( directory_sha1_git, ['sha1'], 'Only sha1_git is supported.') queried_dir = backend.directory_entry_get_by_path( sha1_git_bin, path_string) if not queried_dir: raise NotFoundExc(('Directory entry with path %s from %s not found') % (path_string, directory_sha1_git)) return converters.from_directory_entry(queried_dir) def lookup_release(release_sha1_git): """Return information about the release with sha1 release_sha1_git. Args: release_sha1_git: The release's sha1 as hexadecimal Returns: Release information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( release_sha1_git, ['sha1'], 'Only sha1_git is supported.') res = backend.release_get(sha1_git_bin) return converters.from_release(res) def lookup_revision(rev_sha1_git): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. 
""" _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) return converters.from_revision(revision) def lookup_revision_multiple(sha1_git_list): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ def to_sha1_bin(sha1_hex): _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_hex, ['sha1'], 'Only sha1_git is supported.') return sha1_git_bin sha1_bin_list = (to_sha1_bin(x) for x in sha1_git_list) revisions = backend.revision_get_multiple(sha1_bin_list) return (converters.from_revision(x) for x in revisions) def lookup_revision_message(rev_sha1_git): """Return the raw message of the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal Returns: Decoded revision message as dict {'message': } Raises: ValueError if the identifier provided is not of sha1 nature. NotFoundExc if the revision is not found, or if it has no message """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision with sha1_git %s not found.' % rev_sha1_git) if 'message' not in revision: raise NotFoundExc('No message for revision with sha1_git %s.' % rev_sha1_git) res = {'message': revision['message']} return res def lookup_revision_by(origin_id, branch_name="refs/heads/master", timestamp=None): """Lookup revisions by origin_id, branch_name and timestamp. If: - branch_name is not provided, lookup using 'refs/heads/master' as default. - ts is not provided, use the most recent Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. 
Yields: The revisions matching the criterions. """ res = backend.revision_get_by(origin_id, branch_name, timestamp) return converters.from_revision(res) def lookup_revision_log(rev_sha1_git, limit): """Return information about the revision with sha1 revision_sha1_git. Args: revision_sha1_git: The revision's sha1 as hexadecimal limit: the maximum number of revisions returned Returns: Revision information as dict. Raises: ValueError if the identifier provided is not of sha1 nature. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( rev_sha1_git, ['sha1'], 'Only sha1_git is supported.') revision_entries = backend.revision_log(sha1_git_bin, limit) return map(converters.from_revision, revision_entries) def lookup_revision_log_by(origin_id, branch_name, timestamp, limit): """Return information about the revision with sha1 revision_sha1_git. Args: origin_id: origin of the revision branch_name: revision's branch timestamp: revision's time frame limit: the maximum number of revisions returned Returns: Revision information as dict. Raises: NotFoundExc if no revision corresponds to the criterion NotFoundExc if the corresponding revision has no log """ revision_entries = backend.revision_log_by(origin_id, branch_name, timestamp, limit) if not revision_entries: return None return map(converters.from_revision, revision_entries) def lookup_revision_with_context_by(origin_id, branch_name, ts, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. sha1_git_root being resolved through the lookup of a revision by origin_id, branch_name and ts. In other words, sha1_git is an ancestor of sha1_git_root. Args: - origin_id: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. - sha1_git: one of sha1_git_root's ancestors. - limit: limit the lookup to 100 revisions back. Returns: Pair of (root_revision, revision). 
Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root. """ rev_root = backend.revision_get_by(origin_id, branch_name, ts) if not rev_root: raise NotFoundExc('Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' % (origin_id, branch_name, ts)) return (converters.from_revision(rev_root), lookup_revision_with_context(rev_root, sha1_git, limit)) def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. In other words, sha1_git is an ancestor of sha1_git_root. Args: sha1_git_root: latest revision. The type is either a sha1 (as an hex string) or a non converted dict. sha1_git: one of sha1_git_root's ancestors limit: limit the lookup to 100 revisions back Returns: Information on sha1_git if it is an ancestor of sha1_git_root including children leading to sha1_git_root Raises: BadInputExc in case of unknown algo_hash or bad hash NotFoundExc if either revision is not found or if sha1_git is not an ancestor of sha1_git_root """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) if isinstance(sha1_git_root, str): _, sha1_git_root_bin = query.parse_hash_with_algorithms_or_throws( sha1_git_root, ['sha1'], 'Only sha1_git is supported.') revision_root = backend.revision_get(sha1_git_root_bin) if not revision_root: raise NotFoundExc('Revision root %s not found' % sha1_git_root) else: sha1_git_root_bin = sha1_git_root['id'] revision_log = backend.revision_log(sha1_git_root_bin, limit) parents = {} children = defaultdict(list) for rev in revision_log: 
rev_id = rev['id'] parents[rev_id] = [] for parent_id in rev['parents']: parents[rev_id].append(parent_id) children[parent_id].append(rev_id) if revision['id'] not in parents: raise NotFoundExc('Revision %s is not an ancestor of %s' % (sha1_git, sha1_git_root)) revision['children'] = children[revision['id']] return converters.from_revision(revision) def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False): """Return information on directory pointed by revision with sha1_git. If dir_path is not provided, display top level directory. Otherwise, display the directory pointed by dir_path (if it exists). Args: sha1_git: revision's hash. dir_path: optional directory pointed to by that revision. with_data: boolean that indicates to retrieve the raw data if the path resolves to a content. Default to False (for the api) Returns: Information on the directory pointed to by that revision. Raises: BadInputExc in case of unknown algo_hash or bad hash. NotFoundExc either if the revision is not found or the path referenced does not exist. NotImplementedError in case of dir_path exists but do not reference a type 'dir' or 'file'. """ _, sha1_git_bin = query.parse_hash_with_algorithms_or_throws( sha1_git, ['sha1'], 'Only sha1_git is supported.') revision = backend.revision_get(sha1_git_bin) if not revision: raise NotFoundExc('Revision %s not found' % sha1_git) dir_sha1_git_bin = revision['directory'] if dir_path: entity = backend.directory_entry_get_by_path(dir_sha1_git_bin, dir_path) if not entity: raise NotFoundExc( "Directory or File '%s' pointed to by revision %s not found" % (dir_path, sha1_git)) else: entity = {'type': 'dir', 'target': dir_sha1_git_bin} if entity['type'] == 'dir': directory_entries = backend.directory_ls(entity['target']) return {'type': 'dir', 'path': '.' 
if not dir_path else dir_path, 'revision': sha1_git, 'content': map(converters.from_directory_entry, directory_entries)} elif entity['type'] == 'file': # content content = backend.content_find('sha1_git', entity['target']) if with_data: content['data'] = backend.content_get(content['sha1'])['data'] return {'type': 'file', 'path': '.' if not dir_path else dir_path, 'revision': sha1_git, 'content': converters.from_content(content)} else: raise NotImplementedError('Entity of type %s not implemented.' % entity['type']) def lookup_content(q): """Lookup the content designed by q. Args: q: The release's sha1 as hexadecimal """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) return converters.from_content(c) def lookup_content_raw(q): """Lookup the content defined by q. Args: q: query string of the form Returns: dict with 'sha1' and 'data' keys. data representing its raw data decoded. """ algo, hash = query.parse_hash(q) c = backend.content_find(algo, hash) if not c: return None content = backend.content_get(c['sha1']) return converters.from_content(content) def stat_counters(): """Return the stat counters for Software Heritage Returns: A dict mapping textual labels to integer values. """ return backend.stat_counters() def lookup_origin_visits(origin_id, last_visit=None, per_page=10): """Yields the origin origin_ids' visits. Args: origin_id: origin to list visits for Yields: Dictionaries of origin_visit for that origin """ visits = backend.lookup_origin_visits( origin_id, last_visit=last_visit, limit=per_page) for visit in visits: yield converters.from_origin_visit(visit) def lookup_origin_visit(origin_id, visit_id): """Return information about visit visit_id with origin origin_id. 
Args: origin_id: origin concerned by the visit visit_id: the visit identifier to lookup Yields: The dict origin_visit concerned """ visit = backend.lookup_origin_visit(origin_id, visit_id) return converters.from_origin_visit(visit) def lookup_entity_by_uuid(uuid): """Return the entity's hierarchy from its uuid. Args: uuid: entity's identifier. Returns: List of hierarchy entities from the entity with uuid. """ uuid = query.parse_uuid4(uuid) for entity in backend.entity_get(uuid): entity = converters.from_swh(entity, convert={'last_seen', 'uuid'}, convert_fn=lambda x: str(x)) yield entity def lookup_revision_through(revision, limit=100): """Retrieve a revision from the criterion stored in revision dictionary. Args: revision: Dictionary of criterion to lookup the revision with. Here are the supported combination of possible values: - origin_id, branch_name, ts, sha1_git - origin_id, branch_name, ts - sha1_git_root, sha1_git - sha1_git Returns: None if the revision is not found or the actual revision. """ if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context_by(revision['origin_id'], revision['branch_name'], revision['ts'], revision['sha1_git'], limit) if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision: return lookup_revision_by(revision['origin_id'], revision['branch_name'], revision['ts']) if 'sha1_git_root' in revision and \ 'sha1_git' in revision: return lookup_revision_with_context(revision['sha1_git_root'], revision['sha1_git'], limit) if 'sha1_git' in revision: return lookup_revision(revision['sha1_git']) # this should not happen raise NotImplementedError('Should not happen!') def lookup_directory_through_revision(revision, path=None, limit=100, with_data=False): """Retrieve the directory information from the revision. Args: revision: dictionary of criterion representing a revision to lookup path: directory's path to lookup. 
limit: optional query parameter to limit the revisions log. (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of. with_data: indicate to retrieve the content's raw data if path resolves to a content. Returns: The directory pointing to by the revision criterions at path. """ rev = lookup_revision_through(revision, limit) if not rev: raise NotFoundExc('Revision with criterion %s not found!' % revision) return (rev['id'], lookup_directory_with_revision(rev['id'], path, with_data)) diff --git a/swh/web/ui/templates/about.html b/swh/web/api/templates/about.html similarity index 100% rename from swh/web/ui/templates/about.html rename to swh/web/api/templates/about.html diff --git a/swh/web/ui/templates/api-endpoints.html b/swh/web/api/templates/api-endpoints.html similarity index 75% rename from swh/web/ui/templates/api-endpoints.html rename to swh/web/api/templates/api-endpoints.html index 40aa34fb..62dcca97 100644 --- a/swh/web/ui/templates/api-endpoints.html +++ b/swh/web/api/templates/api-endpoints.html @@ -1,69 +1,69 @@ {% extends "layout.html" %} +{% load api_extras %} {% block title %} Endpoints – Software Heritage API {% endblock %} {% block content %}

Below you can find a list of the available endpoints for version 1 of the Software Heritage API. For a more general introduction please refer to the API overview.

Endpoints marked "available" are considered stable for the current version of the API; endpoints marked "upcoming" are work in progress that will be stabilized in the near future.

{% for route, doc in doc_routes %} - - {% if doc['tags']|length > 0 %} - - + + {% if doc.tags|length > 0 %} + + {% else %} - + {% endif %} - {% set doc_intro = doc['docstring'].split('\n\n')[0] %} - + {% endfor %}
Endpoint Status Description
{{ route }}{{ ', '.join(doc['tags']) }}
{% url doc.route_view_name %}{{ doc.tags|join:', ' }}{{ route }}{% url doc.route_view_name %} available{{ doc_intro | safe_docstring_display | safe }}{{ doc.doc_intro | safe_docstring_display | safe }}
{% endblock %} diff --git a/swh/web/ui/templates/api.html b/swh/web/api/templates/api.html similarity index 100% rename from swh/web/ui/templates/api.html rename to swh/web/api/templates/api.html diff --git a/swh/web/ui/templates/apidoc.html b/swh/web/api/templates/apidoc.html similarity index 74% rename from swh/web/ui/templates/apidoc.html rename to swh/web/api/templates/apidoc.html index b5310331..cf519977 100644 --- a/swh/web/ui/templates/apidoc.html +++ b/swh/web/api/templates/apidoc.html @@ -1,136 +1,137 @@ {% extends "layout.html" %} +{% load api_extras %} {% block title %}{{ heading }} – Software Heritage API {% endblock %} {% block content %} {% if docstring %}

Description

{{ docstring | safe_docstring_display | safe }}
{% endif %} {% if response_data and response_data is not none %}

Request

-
{{ request.method }} {{ request.url }}
+
{{ request.method }} {{ request.build_absolute_uri }}

Response

{% if status_code != 200 %}

Status Code

{{ status_code }}
{% endif %} {% if headers_data and headers_data is not none %}

Headers

- {% for header_name, header_value in headers_data.items() %} + {% for header_name, header_value in headers_data.items %}
{{ header_name }} {{ header_value | urlize_header_links | safe }}
{% endfor %} {% endif %}

Body

-
{{ response_data | urlize_api_links | safe }}
+ {{ response_data | highlight_json | urlize_api_links | safe }}
{% endif %}
{% if urls and urls|length > 0 %}
{% for url in urls %} {% endfor %}
URL Allowed Methods
- {{ url['rule'] }} + {{ url.rule }} - {{ url['methods'] | sort | join(', ') }} + {{ url.methods | dictsort:0 | join:', ' }}

{% endif %} {% if args and args|length > 0 %}

Arguments

{% for arg in args %} -
{{ arg['name'] }}: {{ arg['type'] }}
-
{{ arg['doc'] | safe_docstring_display | safe }}
+
{{ arg.name }}: {{ arg.type }}
+
{{ arg.doc | safe_docstring_display | safe }}
{% endfor %}

{% endif %} {% if params and params|length > 0 %}

Parameters

{% for param in params %} -
{{ param['name'] }}: {{ param['type'] }}
-
{{ param['doc'] | safe_docstring_display | safe }}
+
{{ param.name }}: {{ param.type }}
+
{{ param.doc | safe_docstring_display | safe }}
{% endfor %}

{% endif %} {% if headers %}

Headers

{% for header in headers %} -
{{ header['name'] }}: string
-
{{ header['doc'] | safe_docstring_display | safe }}
+
{{ header.name }}: string
+
{{ header.doc | safe_docstring_display | safe }}
{% endfor %}

{% endif %} {% if return %}

Returns

-
{{ return['type'] }}
-
{{ return['doc'] | safe_docstring_display | safe }}
+
{{ return.type }}
+
{{ return.doc | safe_docstring_display | safe }}

{% endif %} {% if excs and excs|length > 0 %}

Errors

{% for exc in excs %} -
{{ exc['exc'] }}
-
{{ exc['doc'] | safe_docstring_display | safe }}
+
{{ exc.exc }}
+
{{ exc.doc | safe_docstring_display | safe }}
{% endfor %}

{% endif %} {% if examples %}

Examples

{% for example in examples %}
{{ example }}
{% endfor %}
{% endif %} {% endblock %} diff --git a/swh/web/ui/templates/browse.html b/swh/web/api/templates/browse.html similarity index 100% rename from swh/web/ui/templates/browse.html rename to swh/web/api/templates/browse.html diff --git a/swh/web/ui/templates/content-with-origin.html b/swh/web/api/templates/content-with-origin.html similarity index 100% rename from swh/web/ui/templates/content-with-origin.html rename to swh/web/api/templates/content-with-origin.html diff --git a/swh/web/ui/templates/content.html b/swh/web/api/templates/content.html similarity index 100% rename from swh/web/ui/templates/content.html rename to swh/web/api/templates/content.html diff --git a/swh/web/ui/templates/directory.html b/swh/web/api/templates/directory.html similarity index 100% rename from swh/web/ui/templates/directory.html rename to swh/web/api/templates/directory.html diff --git a/swh/web/ui/templates/entity.html b/swh/web/api/templates/entity.html similarity index 100% rename from swh/web/ui/templates/entity.html rename to swh/web/api/templates/entity.html diff --git a/swh/web/ui/templates/includes/apidoc-header-toc.html b/swh/web/api/templates/includes/apidoc-header-toc.html similarity index 100% rename from swh/web/ui/templates/includes/apidoc-header-toc.html rename to swh/web/api/templates/includes/apidoc-header-toc.html diff --git a/swh/web/ui/templates/includes/apidoc-header.html b/swh/web/api/templates/includes/apidoc-header.html similarity index 100% rename from swh/web/ui/templates/includes/apidoc-header.html rename to swh/web/api/templates/includes/apidoc-header.html diff --git a/swh/web/ui/templates/includes/apidoc-header.md b/swh/web/api/templates/includes/apidoc-header.md similarity index 100% rename from swh/web/ui/templates/includes/apidoc-header.md rename to swh/web/api/templates/includes/apidoc-header.md diff --git a/swh/web/ui/templates/includes/home-content.html b/swh/web/api/templates/includes/home-content.html similarity index 100% rename from 
swh/web/ui/templates/includes/home-content.html rename to swh/web/api/templates/includes/home-content.html diff --git a/swh/web/ui/templates/includes/home-directory.html b/swh/web/api/templates/includes/home-directory.html similarity index 100% rename from swh/web/ui/templates/includes/home-directory.html rename to swh/web/api/templates/includes/home-directory.html diff --git a/swh/web/ui/templates/includes/home-origin.html b/swh/web/api/templates/includes/home-origin.html similarity index 100% rename from swh/web/ui/templates/includes/home-origin.html rename to swh/web/api/templates/includes/home-origin.html diff --git a/swh/web/ui/templates/includes/home-revision.html b/swh/web/api/templates/includes/home-revision.html similarity index 100% rename from swh/web/ui/templates/includes/home-revision.html rename to swh/web/api/templates/includes/home-revision.html diff --git a/swh/web/ui/templates/includes/home-search-symbol.html b/swh/web/api/templates/includes/home-search-symbol.html similarity index 100% rename from swh/web/ui/templates/includes/home-search-symbol.html rename to swh/web/api/templates/includes/home-search-symbol.html diff --git a/swh/web/ui/templates/includes/search-form.html b/swh/web/api/templates/includes/search-form.html similarity index 100% rename from swh/web/ui/templates/includes/search-form.html rename to swh/web/api/templates/includes/search-form.html diff --git a/swh/web/ui/templates/layout.html b/swh/web/api/templates/layout.html similarity index 66% rename from swh/web/ui/templates/layout.html rename to swh/web/api/templates/layout.html index a533fa2e..71593a94 100644 --- a/swh/web/ui/templates/layout.html +++ b/swh/web/api/templates/layout.html @@ -1,83 +1,75 @@ +{% load static %} {% block title %}{% endblock %} - + - - - - - - + + + + + +
-

{{ self.title() }}

+

{{ self.title }}

- {% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} -
- {% for category, message in messages %} - - {% endfor %} -
- {% endif %} - {% endwith %}
{% block content %}{% endblock %}
- back to top + back to top
diff --git a/swh/web/ui/templates/origin.html b/swh/web/api/templates/origin.html similarity index 100% rename from swh/web/ui/templates/origin.html rename to swh/web/api/templates/origin.html diff --git a/swh/web/ui/templates/person.html b/swh/web/api/templates/person.html similarity index 100% rename from swh/web/ui/templates/person.html rename to swh/web/api/templates/person.html diff --git a/swh/web/ui/templates/release.html b/swh/web/api/templates/release.html similarity index 100% rename from swh/web/ui/templates/release.html rename to swh/web/api/templates/release.html diff --git a/swh/web/ui/templates/revision-directory.html b/swh/web/api/templates/revision-directory.html similarity index 100% rename from swh/web/ui/templates/revision-directory.html rename to swh/web/api/templates/revision-directory.html diff --git a/swh/web/ui/templates/revision-log.html b/swh/web/api/templates/revision-log.html similarity index 100% rename from swh/web/ui/templates/revision-log.html rename to swh/web/api/templates/revision-log.html diff --git a/swh/web/ui/templates/revision.html b/swh/web/api/templates/revision.html similarity index 100% rename from swh/web/ui/templates/revision.html rename to swh/web/api/templates/revision.html diff --git a/swh/web/ui/templates/search.html b/swh/web/api/templates/search.html similarity index 100% rename from swh/web/ui/templates/search.html rename to swh/web/api/templates/search.html diff --git a/swh/web/ui/templates/symbols.html b/swh/web/api/templates/symbols.html similarity index 100% rename from swh/web/ui/templates/symbols.html rename to swh/web/api/templates/symbols.html diff --git a/swh/web/ui/tests/__init__.py b/swh/web/api/templatetags/__init__.py similarity index 100% rename from swh/web/ui/tests/__init__.py rename to swh/web/api/templatetags/__init__.py diff --git a/swh/web/api/templatetags/api_extras.py b/swh/web/api/templatetags/api_extras.py new file mode 100644 index 00000000..74fbaa5e --- /dev/null +++ 
b/swh/web/api/templatetags/api_extras.py @@ -0,0 +1,90 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import re + +from docutils.core import publish_parts +from docutils.writers.html4css1 import Writer, HTMLTranslator +from inspect import cleandoc + +from django import template +from django.utils.safestring import mark_safe + + +from pygments import highlight +from pygments.lexers import JsonLexer +from pygments.formatters import HtmlFormatter + +register = template.Library() + + +class NoHeaderHTMLTranslator(HTMLTranslator): + """ + Docutils translator subclass to customize the generation of HTML + from reST-formatted docstrings + """ + def __init__(self, document): + super().__init__(document) + self.body_prefix = [] + self.body_suffix = [] + + def visit_bullet_list(self, node): + self.context.append((self.compact_simple, self.compact_p)) + self.compact_p = None + self.compact_simple = self.is_compactable(node) + self.body.append(self.starttag(node, 'ul', CLASS='docstring')) + + +DOCSTRING_WRITER = Writer() +DOCSTRING_WRITER.translator_class = NoHeaderHTMLTranslator + + +@register.filter +def safe_docstring_display(docstring): + """ + Utility function to htmlize reST-formatted documentation in browsable + api. + """ + docstring = cleandoc(docstring) + return publish_parts(docstring, writer=DOCSTRING_WRITER)['html_body'] + + +@register.filter +def urlize_api_links(text): + """Utility function for decorating api links in browsable api. + + Args: + text: whose content matching links should be transformed into + contextual API or Browse html links. + + Returns + The text transformed if any link is found. + The text as is otherwise. 
+ + """ + return re.sub(r'(/api/[^"<]*/|/browse/.*/)', + r'\1', + text) + + +@register.filter +def urlize_header_links(text): + """Utility function for decorating headers links in browsable api. + + Args + text: Text whose content contains Link header value + + Returns: + The text transformed with html link if any link is found. + The text as is otherwise. + + """ + return re.sub(r'<(/api/.*|/browse/.*)>', r'<\1>', + text) + + +@register.filter +def highlight_json(text): + return mark_safe(highlight(text, JsonLexer(), HtmlFormatter())) diff --git a/swh/web/api/tests/__init__.py b/swh/web/api/tests/__init__.py new file mode 100644 index 00000000..8bcb72e6 --- /dev/null +++ b/swh/web/api/tests/__init__.py @@ -0,0 +1,10 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import os +import django + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swh.web.settings") +django.setup() diff --git a/swh/web/api/tests/swh_api_testcase.py b/swh/web/api/tests/swh_api_testcase.py new file mode 100644 index 00000000..86aad134 --- /dev/null +++ b/swh/web/api/tests/swh_api_testcase.py @@ -0,0 +1,70 @@ +# Copyright (C) 2015-2016 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +# Functions defined here are NOT DESIGNED FOR PRODUCTION + +from django.test import TestCase + +from swh.storage.api.client import RemoteStorage as Storage + +from swh.web import config + + +# Because the Storage's __init__ function does side effect at startup... 
+class RemoteStorageAdapter(Storage): + def __init__(self, base_url): + self.base_url = base_url + + +def _init_mock_storage(base_url='https://somewhere.org:4321'): + """Instanciate a remote storage whose goal is to be mocked in a test + context. + + NOT FOR PRODUCTION + + Returns: + An instance of swh.storage.api.client.RemoteStorage destined to be + mocked (it does not do any rest call) + + """ + return RemoteStorageAdapter(base_url) # destined to be used as mock + + +def create_config(base_url='https://somewhere.org:4321'): + """Function to initiate a flask app with storage designed to be mocked. + + Returns: + Tuple: + - app test client (for testing api, client decorator from flask) + - application's full configuration + - the storage instance to stub and mock + - the main app without any decoration + + NOT FOR PRODUCTION + + """ + storage = _init_mock_storage(base_url) + + swh_config = config.get_config() + + # inject the mock data + swh_config.update({'storage': storage}) + + return swh_config + + +class SWHApiTestCase(TestCase): + """Testing API class. 
+ + """ + @classmethod + def setUpClass(cls): + super(SWHApiTestCase, cls).setUpClass() + cls.test_config = create_config() + cls.maxDiff = None + + @classmethod + def storage(cls): + return cls.test_config['storage'] diff --git a/swh/web/ui/tests/test_apidoc.py b/swh/web/api/tests/test_apidoc.py similarity index 70% rename from swh/web/ui/tests/test_apidoc.py rename to swh/web/api/tests/test_apidoc.py index 7f45dde6..7c7973f4 100644 --- a/swh/web/ui/tests/test_apidoc.py +++ b/swh/web/api/tests/test_apidoc.py @@ -1,119 +1,121 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information - from nose.tools import istest, nottest +from rest_framework.response import Response -from swh.web.ui import apidoc -from swh.web.ui.main import app -from swh.web.ui.tests.test_app import SWHApidocTestCase +from .swh_api_testcase import SWHApiTestCase +from swh.web.api import apidoc +from swh.web.api.apiurls import api_route -class APIDocTestCase(SWHApidocTestCase): +class APIDocTestCase(SWHApiTestCase): def setUp(self): self.arg_dict = { 'name': 'my_pretty_arg', 'default': 'some default value', 'type': apidoc.argtypes.sha1, 'doc': 'this arg does things' } self.stub_excs = [{'exc': apidoc.excs.badinput, 'doc': 'My exception documentation'}] self.stub_args = [{'name': 'stub_arg', 'default': 'some_default'}] self.stub_rule_list = [ {'rule': 'some/route/with/args/', 'methods': {'GET', 'HEAD', 'OPTIONS'}}, {'rule': 'some/doc/route/', 'methods': {'GET', 'HEAD', 'OPTIONS'}}, {'rule': 'some/other/route/', 'methods': {'GET', 'HEAD', 'OPTIONS'}} ] self.stub_return = { 'type': apidoc.rettypes.dict.value, 'doc': 'a dict with amazing properties' } @staticmethod @apidoc.route('/my/nodoc/url/') @nottest - def apidoc_nodoc_tester(arga, argb): - return arga + argb + def apidoc_nodoc_tester(request, arga=0, 
argb=0): + return Response(arga + argb) @istest def apidoc_nodoc_failure(self): with self.assertRaises(Exception): - self.client.get('/my/nodoc/url/') + self.client.get('/api/1/my/nodoc/url/') @staticmethod - @app.route('/some///') + @api_route(r'/some/(?P[0-9]+)/(?P[0-9]+)/', + 'some-doc-route') @apidoc.route('/some/doc/route/') @nottest - def apidoc_route_tester(myarg, myotherarg, akw=0): + def apidoc_route_tester(request, myarg, myotherarg, akw=0): """ Sample doc """ - return {'result': myarg + myotherarg + akw} + return {'result': int(myarg) + int(myotherarg) + akw} @istest def apidoc_route_doc(self): # when - rv = self.client.get('/some/doc/route/') + rv = self.client.get('/api/1/some/doc/route/') # then self.assertEqual(rv.status_code, 200) - self.assert_template_used('apidoc.html') + self.assertTemplateUsed('apidoc.html') @istest def apidoc_route_fn(self): # when - rv = self.client.get('/some/1/1/') + rv = self.client.get('/api/1/some/1/1/') # then self.assertEqual(rv.status_code, 200) @staticmethod - @app.route('/some/full///') + @api_route(r'/some/full/(?P[0-9]+)/(?P[0-9]+)/', + 'some-complete-doc-route') @apidoc.route('/some/complete/doc/route/') @apidoc.arg('myarg', default=67, argtype=apidoc.argtypes.int, argdoc='my arg') @apidoc.arg('myotherarg', default=42, argtype=apidoc.argtypes.int, argdoc='my other arg') @apidoc.param('limit', argtype=apidoc.argtypes.int, default=10, doc='Result limitation') @apidoc.header('Link', doc='Header link returns for pagination purpose') @apidoc.raises(exc=apidoc.excs.badinput, doc='Oops') @apidoc.returns(rettype=apidoc.rettypes.dict, retdoc='sum of args') @nottest - def apidoc_full_stack_tester(myarg, myotherarg, akw=0): + def apidoc_full_stack_tester(request, myarg, myotherarg, akw=0): """ Sample doc """ - return {'result': myarg + myotherarg + akw} + return {'result': int(myarg) + int(myotherarg) + akw} @istest def apidoc_full_stack_doc(self): # when - rv = self.client.get('/some/complete/doc/route/') + rv = 
self.client.get('/api/1/some/complete/doc/route/') # then self.assertEqual(rv.status_code, 200) - self.assert_template_used('apidoc.html') + self.assertTemplateUsed('apidoc.html') @istest def apidoc_full_stack_fn(self): # when - rv = self.client.get('/some/full/1/1/') + rv = self.client.get('/api/1/some/full/1/1/') # then self.assertEqual(rv.status_code, 200) diff --git a/swh/web/api/tests/test_apiresponse.py b/swh/web/api/tests/test_apiresponse.py new file mode 100644 index 00000000..cc4855ed --- /dev/null +++ b/swh/web/api/tests/test_apiresponse.py @@ -0,0 +1,177 @@ +# Copyright (C) 2015-2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import json +import unittest + +from rest_framework.test import APIRequestFactory + +from nose.tools import istest +from unittest.mock import patch + +from swh.web.api.apiresponse import ( + compute_link_header, transform, make_api_response, + filter_by_fields +) + +api_request_factory = APIRequestFactory() + + +class SWHComputeLinkHeaderTest(unittest.TestCase): + @istest + def compute_link_header(self): + rv = { + 'headers': {'link-next': 'foo', 'link-prev': 'bar'}, + 'results': [1, 2, 3] + } + options = {} + + # when + headers = compute_link_header( + rv, options) + + self.assertEquals(headers, { + 'Link': '; rel="next",; rel="previous"', + }) + + @istest + def compute_link_header_nothing_changed(self): + rv = {} + options = {} + + # when + headers = compute_link_header( + rv, options) + + self.assertEquals(headers, {}) + + @istest + def compute_link_header_nothing_changed_2(self): + rv = {'headers': {}} + options = {} + + # when + headers = compute_link_header( + rv, options) + + self.assertEquals(headers, {}) + + +class SWHTransformProcessorTest(unittest.TestCase): + @istest + def transform_only_return_results_1(self): + rv = {'results': 
{'some-key': 'some-value'}} + + self.assertEquals(transform(rv), {'some-key': 'some-value'}) + + @istest + def transform_only_return_results_2(self): + rv = {'headers': {'something': 'do changes'}, + 'results': {'some-key': 'some-value'}} + + self.assertEquals(transform(rv), {'some-key': 'some-value'}) + + @istest + def transform_do_remove_headers(self): + rv = {'headers': {'something': 'do changes'}, + 'some-key': 'some-value'} + + self.assertEquals(transform(rv), {'some-key': 'some-value'}) + + @istest + def transform_do_nothing(self): + rv = {'some-key': 'some-value'} + + self.assertEquals(transform(rv), {'some-key': 'some-value'}) + + +class RendererTestCase(unittest.TestCase): + + @patch('swh.web.api.apiresponse.json') + @patch('swh.web.api.apiresponse.filter_by_fields') + @patch('swh.web.api.apiresponse.utils.shorten_path') + @istest + def swh_multi_response_mimetype(self, mock_shorten_path, + mock_filter, mock_json): + # given + data = { + 'data': [12, 34], + 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' + } + + mock_filter.return_value = data + mock_shorten_path.return_value = 'my_short_path' + + accepted_response_formats = {'html': 'text/html', + 'yaml': 'application/yaml', + 'json': 'application/json'} + + for format in accepted_response_formats: + + request = api_request_factory.get('/api/test/path/') + + mime_type = accepted_response_formats[format] + setattr(request, 'accepted_media_type', mime_type) + + if mime_type == 'text/html': + + expected_data = { + 'response_data': json.dumps(data), + 'request': request, + 'headers_data': {}, + 'heading': 'my_short_path', + 'status_code': 200 + } + + mock_json.dumps.return_value = json.dumps(data) + else: + expected_data = data + + # when + + rv = make_api_response(request, data) + + # then + mock_filter.assert_called_with(request, data) + self.assertEqual(rv.data, expected_data) + self.assertEqual(rv.status_code, 200) + if mime_type == 'text/html': + self.assertEqual(rv.template_name, 'apidoc.html') + + 
@istest + def swh_filter_renderer_do_nothing(self): + # given + input_data = {'a': 'some-data'} + + request = api_request_factory.get('/api/test/path/', data={}) + setattr(request, 'query_params', request.GET) + + # when + actual_data = filter_by_fields(request, input_data) + + # then + self.assertEquals(actual_data, input_data) + + @patch('swh.web.api.apiresponse.utils') + @istest + def swh_filter_renderer_do_filter(self, mock_utils): + # given + mock_utils.filter_field_keys.return_value = {'a': 'some-data'} + + request = api_request_factory.get('/api/test/path/', + data={'fields': 'a,c'}) + setattr(request, 'query_params', request.GET) + + input_data = {'a': 'some-data', + 'b': 'some-other-data'} + + # when + actual_data = filter_by_fields(request, input_data) + + # then + self.assertEquals(actual_data, {'a': 'some-data'}) + + mock_utils.filter_field_keys.assert_called_once_with(input_data, + {'a', 'c'}) diff --git a/swh/web/ui/tests/test_backend.py b/swh/web/api/tests/test_backend.py similarity index 79% rename from swh/web/ui/tests/test_backend.py rename to swh/web/api/tests/test_backend.py index 4262278d..877be081 100644 --- a/swh/web/ui/tests/test_backend.py +++ b/swh/web/api/tests/test_backend.py @@ -1,936 +1,938 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import MagicMock from swh.model import hashutil -from swh.web.ui import backend -from swh.web.ui.tests import test_app +from .swh_api_testcase import SWHApiTestCase +from swh.web.api import backend -class BackendTestCase(test_app.SWHApiTestCase): + +class BackendTestCase(SWHApiTestCase): def setUp(self): self.origin_visit1 = { 'date': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 1 } 
@istest def content_get_ko_not_found_1(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f777') - self.storage.content_get = MagicMock(return_value=None) + + self.storage().content_get = MagicMock(return_value=None) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) - self.storage.content_get.assert_called_once_with( + self.storage().content_get.assert_called_once_with( [sha1_bin]) @istest def content_get_ko_not_found_empty_result(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_get = MagicMock(return_value=[]) + self.storage().content_get = MagicMock(return_value=[]) # when actual_content = backend.content_get(sha1_bin) # then self.assertIsNone(actual_content) - self.storage.content_get.assert_called_once_with( + self.storage().content_get.assert_called_once_with( [sha1_bin]) @istest def content_ctags_search_1(self): # given - self.storage.content_ctags_search = MagicMock( + self.storage().content_ctags_search = MagicMock( return_value="some-result") # when actual_ctags = backend.content_ctags_search( 'foo', last_sha1='some-hash', limit=1) # then self.assertEquals(actual_ctags, 'some-result') - self.storage.content_ctags_search.assert_called_once_with( + self.storage().content_ctags_search.assert_called_once_with( 'foo', last_sha1='some-hash', limit=1) @istest def content_ctags_search_2(self): # given - self.storage.content_ctags_search = MagicMock( + self.storage().content_ctags_search = MagicMock( return_value="some other result") # when actual_ctags = backend.content_ctags_search( 'foo|bar', last_sha1='some-hash', limit=2) # then self.assertEquals(actual_ctags, 'some other result') - self.storage.content_ctags_search.assert_called_once_with( + self.storage().content_ctags_search.assert_called_once_with( 'foo|bar', last_sha1='some-hash', limit=2) @istest def content_ctags_search_3(self): # given - 
self.storage.content_ctags_search = MagicMock( + self.storage().content_ctags_search = MagicMock( return_value="yet another result") # when actual_ctags = backend.content_ctags_search( 'bar', last_sha1='some-hash', limit=1000) # then self.assertEquals(actual_ctags, 'yet another result') - self.storage.content_ctags_search.assert_called_once_with( + self.storage().content_ctags_search.assert_called_once_with( 'bar', last_sha1='some-hash', limit=50) @istest def content_get(self): # given sha1_bin = hashutil.hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') stub_contents = [{ 'sha1': sha1_bin, 'data': b'binary data', }] - self.storage.content_get = MagicMock(return_value=stub_contents) + self.storage().content_get = MagicMock(return_value=stub_contents) # when actual_content = backend.content_get(sha1_bin) # then self.assertEquals(actual_content, stub_contents[0]) - self.storage.content_get.assert_called_once_with( + self.storage().content_get.assert_called_once_with( [sha1_bin]) @istest def content_find_ko_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_find = MagicMock(return_value=None) + self.storage().content_find = MagicMock(return_value=None) # when actual_lookup = backend.content_find('sha1_git', sha1_bin) # then self.assertIsNone(actual_lookup) - self.storage.content_find.assert_called_once_with( + self.storage().content_find.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_find(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_find = MagicMock(return_value=(1, 2, 3)) + self.storage().content_find = MagicMock(return_value=(1, 2, 3)) # when actual_content = backend.content_find('sha1', sha1_bin) # then self.assertEquals(actual_content, (1, 2, 3)) # check the function has been called with parameters - self.storage.content_find.assert_called_with({'sha1': sha1_bin}) + 
self.storage().content_find.assert_called_with({'sha1': sha1_bin}) @istest def content_find_provenance_ko_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_find_provenance = MagicMock( + self.storage().content_find_provenance = MagicMock( return_value=(x for x in [])) # when actual_lookup = backend.content_find_provenance('sha1_git', sha1_bin) # then self.assertEquals(list(actual_lookup), []) - self.storage.content_find_provenance.assert_called_once_with( + self.storage().content_find_provenance.assert_called_once_with( {'sha1_git': sha1_bin}) @istest def content_ctags_get(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_ctags_get = MagicMock( + self.storage().content_ctags_get = MagicMock( return_value=[1, 2, 3]) # when actual_content = backend.content_ctags_get(sha1_bin) # then self.assertEquals(actual_content, [1, 2, 3]) - self.storage.content_ctags_get.assert_called_with( + self.storage().content_ctags_get.assert_called_with( [sha1_bin]) @istest def content_ctags_get_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_ctags_get = MagicMock( + self.storage().content_ctags_get = MagicMock( return_value=[]) # when actual_content = backend.content_ctags_get(sha1_bin) # then self.assertEquals(actual_content, []) - self.storage.content_ctags_get.assert_called_with( + self.storage().content_ctags_get.assert_called_with( [sha1_bin]) @istest def content_filetype_get(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_mimetype_get = MagicMock( + self.storage().content_mimetype_get = MagicMock( return_value=[1, 2, 3]) # when actual_content = backend.content_filetype_get(sha1_bin) # then self.assertEquals(actual_content, 1) - self.storage.content_mimetype_get.assert_called_with( + 
self.storage().content_mimetype_get.assert_called_with( [sha1_bin]) @istest def content_filetype_get_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_mimetype_get = MagicMock( + self.storage().content_mimetype_get = MagicMock( return_value=[]) # when actual_content = backend.content_filetype_get(sha1_bin) # then self.assertIsNone(actual_content) - self.storage.content_mimetype_get.assert_called_with( + self.storage().content_mimetype_get.assert_called_with( [sha1_bin]) @istest def content_language_get(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_language_get = MagicMock( + self.storage().content_language_get = MagicMock( return_value=[1, 2, 3]) # when actual_content = backend.content_language_get(sha1_bin) # then self.assertEquals(actual_content, 1) - self.storage.content_language_get.assert_called_with( + self.storage().content_language_get.assert_called_with( [sha1_bin]) @istest def content_language_get_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_language_get = MagicMock( + self.storage().content_language_get = MagicMock( return_value=[]) # when actual_content = backend.content_language_get(sha1_bin) # then self.assertIsNone(actual_content) - self.storage.content_language_get.assert_called_with( + self.storage().content_language_get.assert_called_with( [sha1_bin]) @istest def content_license_get(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_fossology_license_get = MagicMock( + self.storage().content_fossology_license_get = MagicMock( return_value=[1, 2, 3]) # when actual_content = backend.content_license_get(sha1_bin) # then self.assertEquals(actual_content, 1) - self.storage.content_fossology_license_get.assert_called_with( + 
self.storage().content_fossology_license_get.assert_called_with( [sha1_bin]) @istest def content_license_get_no_result(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_fossology_license_get = MagicMock( + self.storage().content_fossology_license_get = MagicMock( return_value=[]) # when actual_content = backend.content_license_get(sha1_bin) # then self.assertIsNone(actual_content) - self.storage.content_fossology_license_get.assert_called_with( + self.storage().content_fossology_license_get.assert_called_with( [sha1_bin]) @istest def content_find_provenance(self): # given sha1_bin = hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') - self.storage.content_find_provenance = MagicMock( + self.storage().content_find_provenance = MagicMock( return_value=(x for x in (1, 2, 3))) # when actual_content = backend.content_find_provenance('sha1', sha1_bin) # then self.assertEquals(list(actual_content), [1, 2, 3]) # check the function has been called with parameters - self.storage.content_find_provenance.assert_called_with( + self.storage().content_find_provenance.assert_called_with( {'sha1': sha1_bin}) @istest def content_missing_per_sha1_none(self): # given sha1s_bin = [hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hash_to_bytes( '745bab676c8f3cec8016e0c39ea61cf57e518865' )] - self.storage.content_missing_per_sha1 = MagicMock(return_value=[]) + self.storage().content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, []) - self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) + self.storage().content_missing_per_sha1.assert_called_with(sha1s_bin) @istest def content_missing_per_sha1_some(self): # given sha1s_bin = [hashutil.hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f'), hashutil.hash_to_bytes( 
'745bab676c8f3cec8016e0c39ea61cf57e518865' )] - self.storage.content_missing_per_sha1 = MagicMock(return_value=[ + self.storage().content_missing_per_sha1 = MagicMock(return_value=[ hashutil.hash_to_bytes( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) # when actual_content = backend.content_missing_per_sha1(sha1s_bin) # then self.assertEquals(actual_content, [hashutil.hash_to_bytes( '745bab676c8f3cec8016e0c39ea61cf57e518865' )]) - self.storage.content_missing_per_sha1.assert_called_with(sha1s_bin) + self.storage().content_missing_per_sha1.assert_called_with(sha1s_bin) @istest def origin_get_by_id(self): # given - self.storage.origin_get = MagicMock(return_value={ + self.storage().origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when actual_origin = backend.origin_get({'id': 'origin-id'}) # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) - self.storage.origin_get.assert_called_with({'id': 'origin-id'}) + self.storage().origin_get.assert_called_with({'id': 'origin-id'}) @istest def origin_get_by_type_url(self): # given - self.storage.origin_get = MagicMock(return_value={ + self.storage().origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when actual_origin = backend.origin_get({'type': 'ftp', 'url': 'ftp://some/url/to/origin'}) # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) - self.storage.origin_get.assert_called_with( + self.storage().origin_get.assert_called_with( {'type': 'ftp', 'url': 'ftp://some/url/to/origin'}) @istest def person_get(self): # given - self.storage.person_get = MagicMock(return_value=[{ + 
self.storage().person_get = MagicMock(return_value=[{ 'id': 'person-id', 'name': 'blah'}]) # when actual_person = backend.person_get('person-id') # then self.assertEqual(actual_person, {'id': 'person-id', 'name': 'blah'}) - self.storage.person_get.assert_called_with(['person-id']) + self.storage().person_get.assert_called_with(['person-id']) @istest def directory_get_not_found(self): # given sha1_bin = hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - self.storage.directory_get = MagicMock(return_value=None) + self.storage().directory_get = MagicMock(return_value=None) # when actual_directory = backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, None) - self.storage.directory_get.assert_called_with([sha1_bin]) + self.storage().directory_get.assert_called_with([sha1_bin]) @istest def directory_get(self): # given sha1_bin = hashutil.hash_to_bytes( '51f71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') sha1_bin2 = hashutil.hash_to_bytes( '62071b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir = {'id': sha1_bin, 'revision': b'sha1-blah'} stub_dir2 = {'id': sha1_bin2, 'revision': b'sha1-foobar'} - self.storage.directory_get = MagicMock(return_value=[stub_dir, - stub_dir2]) + self.storage().directory_get = MagicMock(return_value=[stub_dir, + stub_dir2]) # when actual_directory = backend.directory_get(sha1_bin) # then self.assertEquals(actual_directory, stub_dir) - self.storage.directory_get.assert_called_with([sha1_bin]) + self.storage().directory_get.assert_called_with([sha1_bin]) @istest def directory_ls_empty_result(self): # given sha1_bin = hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - self.storage.directory_ls = MagicMock(return_value=[]) + self.storage().directory_ls = MagicMock(return_value=[]) # when actual_directory = backend.directory_ls(sha1_bin) # then self.assertEquals(actual_directory, []) - self.storage.directory_ls.assert_called_with(sha1_bin, False) + 
self.storage().directory_ls.assert_called_with(sha1_bin, False) @istest def directory_ls(self): # given sha1_bin = hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') stub_dir_entries = [{ 'sha1': hashutil.hash_to_bytes( '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'), 'sha256': hashutil.hash_to_bytes( '39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'target': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'dir_id': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'name': b'bob', 'type': 10, }] - self.storage.directory_ls = MagicMock( + self.storage().directory_ls = MagicMock( return_value=stub_dir_entries) actual_directory = backend.directory_ls(sha1_bin, recursive=True) # then self.assertIsNotNone(actual_directory) self.assertEqual(list(actual_directory), stub_dir_entries) - self.storage.directory_ls.assert_called_with(sha1_bin, True) + self.storage().directory_ls.assert_called_with(sha1_bin, True) @istest def release_get_not_found(self): # given sha1_bin = hashutil.hash_to_bytes( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') - self.storage.release_get = MagicMock(return_value=[]) + self.storage().release_get = MagicMock(return_value=[]) # when actual_release = backend.release_get(sha1_bin) # then self.assertIsNone(actual_release) - self.storage.release_get.assert_called_with([sha1_bin]) + self.storage().release_get.assert_called_with([sha1_bin]) @istest def release_get(self): # given sha1_bin = hashutil.hash_to_bytes( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') stub_releases = [{ 'id': sha1_bin, 'target': None, 'date': datetime.datetime(2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc), 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }] - self.storage.release_get = MagicMock(return_value=stub_releases) + self.storage().release_get = MagicMock(return_value=stub_releases) # when 
actual_release = backend.release_get(sha1_bin) # then self.assertEqual(actual_release, stub_releases[0]) - self.storage.release_get.assert_called_with([sha1_bin]) + self.storage().release_get.assert_called_with([sha1_bin]) @istest def revision_get_by_not_found(self): # given - self.storage.revision_get_by = MagicMock(return_value=[]) + self.storage().revision_get_by = MagicMock(return_value=[]) # when actual_revision = backend.revision_get_by(10, 'master', 'ts2') # then self.assertIsNone(actual_revision) - self.storage.revision_get_by.assert_called_with(10, 'master', - timestamp='ts2', - limit=1) + self.storage().revision_get_by.assert_called_with(10, 'master', + timestamp='ts2', + limit=1) @istest def revision_get_by(self): # given - self.storage.revision_get_by = MagicMock(return_value=[{'id': 1}]) + self.storage().revision_get_by = MagicMock(return_value=[{'id': 1}]) # when actual_revisions = backend.revision_get_by(100, 'dev', 'ts') # then self.assertEquals(actual_revisions, {'id': 1}) - self.storage.revision_get_by.assert_called_with(100, 'dev', - timestamp='ts', - limit=1) + self.storage().revision_get_by.assert_called_with(100, 'dev', + timestamp='ts', + limit=1) @istest def revision_get_not_found(self): # given sha1_bin = hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5') - self.storage.revision_get = MagicMock(return_value=[]) + self.storage().revision_get = MagicMock(return_value=[]) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertIsNone(actual_revision) - self.storage.revision_get.assert_called_with([sha1_bin]) + self.storage().revision_get.assert_called_with([sha1_bin]) @istest def revision_get(self): # given sha1_bin = hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5') stub_revisions = [{ 'id': sha1_bin, 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 
'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] - self.storage.revision_get = MagicMock(return_value=stub_revisions) + self.storage().revision_get = MagicMock(return_value=stub_revisions) # when actual_revision = backend.revision_get(sha1_bin) # then self.assertEqual(actual_revision, stub_revisions[0]) - self.storage.revision_get.assert_called_with([sha1_bin]) + self.storage().revision_get.assert_called_with([sha1_bin]) @istest def revision_get_multiple(self): # given sha1_bin = hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5') sha1_other = hashutil.hash_to_bytes( 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') stub_revisions = [ { 'id': sha1_bin, 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }, { 'id': sha1_other, 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'name', 'email': b'name@surname.org', }, 'committer': { 'name': b'name', 'email': b'name@surname.org', }, 'message': b'ugly fix for bug 42', 'date': datetime.datetime(2000, 1, 12, 5, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 12, 5, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], } ] - self.storage.revision_get = MagicMock( + self.storage().revision_get = MagicMock( 
return_value=stub_revisions) # when actual_revision = backend.revision_get_multiple([sha1_bin, sha1_other]) # then self.assertEqual(actual_revision, stub_revisions) - self.storage.revision_get.assert_called_with( + self.storage().revision_get.assert_called_with( [sha1_bin, sha1_other]) @istest def revision_get_multiple_none_found(self): # given sha1_bin = hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5') sha1_other = hashutil.hash_to_bytes( 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc') - self.storage.revision_get = MagicMock( + self.storage().revision_get = MagicMock( return_value=[]) # when actual_revision = backend.revision_get_multiple([sha1_bin, sha1_other]) # then self.assertEqual(actual_revision, []) - self.storage.revision_get.assert_called_with( + self.storage().revision_get.assert_called_with( [sha1_bin, sha1_other]) @istest def revision_log(self): # given sha1_bin = hashutil.hash_to_bytes( '28d8be353ed3480476f032475e7c233eff7371d5') stub_revision_log = [{ 'id': sha1_bin, 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] - self.storage.revision_log = MagicMock(return_value=stub_revision_log) + self.storage().revision_log = MagicMock(return_value=stub_revision_log) # when actual_revision = backend.revision_log(sha1_bin, limit=1) # then self.assertEqual(list(actual_revision), stub_revision_log) - self.storage.revision_log.assert_called_with([sha1_bin], 1) + self.storage().revision_log.assert_called_with([sha1_bin], 1) @istest def revision_log_by(self): # given sha1_bin = hashutil.hash_to_bytes( 
'28d8be353ed3480476f032475e7c233eff7371d5') stub_revision_log = [{ 'id': sha1_bin, 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'bill & boule', 'email': b'bill@boule.org', }, 'committer': { 'name': b'boule & bill', 'email': b'boule@bill.org', }, 'message': b'elegant fix for bug 31415957', 'date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'date_offset': 0, 'committer_date': datetime.datetime(2000, 1, 17, 11, 23, 54), 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], }] - self.storage.revision_log_by = MagicMock( + self.storage().revision_log_by = MagicMock( return_value=stub_revision_log) # when actual_log = backend.revision_log_by(1, 'refs/heads/master', None, limit=1) # then self.assertEqual(actual_log, stub_revision_log) - self.storage.revision_log.assert_called_with([sha1_bin], 1) + self.storage().revision_log.assert_called_with([sha1_bin], 1) @istest def revision_log_by_norev(self): # given sha1_bin = hashutil.hash_to_bytes( '28d8be353ed3480476f032475e7c233eff7371d5') - self.storage.revision_log_by = MagicMock(return_value=None) + self.storage().revision_log_by = MagicMock(return_value=None) # when actual_log = backend.revision_log_by(1, 'refs/heads/master', None, limit=1) # then self.assertEqual(actual_log, None) - self.storage.revision_log.assert_called_with([sha1_bin], 1) + self.storage().revision_log.assert_called_with([sha1_bin], 1) @istest def stat_counters(self): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } - self.storage.stat_counters = MagicMock(return_value=input_stats) + self.storage().stat_counters = MagicMock(return_value=input_stats) # when 
actual_stats = backend.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) - self.storage.stat_counters.assert_called_with() + self.storage().stat_counters.assert_called_with() @istest def lookup_origin_visits(self): # given expected_origin_visits = [ self.origin_visit1, { 'date': datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 2 }, { 'date': datetime.datetime( 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc), 'origin': 1, 'visit': 3 }] - self.storage.origin_visit_get = MagicMock( + self.storage().origin_visit_get = MagicMock( return_value=expected_origin_visits) # when actual_origin_visits = backend.lookup_origin_visits(5) # then self.assertEqual(list(actual_origin_visits), expected_origin_visits) - self.storage.origin_visit_get.assert_called_with( + self.storage().origin_visit_get.assert_called_with( 5, last_visit=None, limit=10) @istest def lookup_origin_visit(self): # given - self.storage.origin_visit_get_by = MagicMock( + self.storage().origin_visit_get_by = MagicMock( return_value=self.origin_visit1) # when actual_origin_visit = backend.lookup_origin_visit(10, 1) # then self.assertEqual(actual_origin_visit, self.origin_visit1) - self.storage.origin_visit_get_by.assert_called_with(10, 1) + self.storage().origin_visit_get_by.assert_called_with(10, 1) @istest def lookup_origin_visit_none(self): # given - self.storage.origin_visit_get_by = MagicMock( + self.storage().origin_visit_get_by = MagicMock( return_value=None) # when actual_origin_visit = backend.lookup_origin_visit(1, 10) # then self.assertIsNone(actual_origin_visit) - self.storage.origin_visit_get_by.assert_called_with(1, 10) + self.storage().origin_visit_get_by.assert_called_with(1, 10) @istest def directory_entry_get_by_path(self): # given stub_dir_entry = {'id': b'dir-id', 'type': 'dir', 'name': b'some/path/foo'} - self.storage.directory_entry_get_by_path = MagicMock( + 
self.storage().directory_entry_get_by_path = MagicMock( return_value=stub_dir_entry) # when actual_dir_entry = backend.directory_entry_get_by_path(b'dir-sha1', 'some/path/foo') self.assertEquals(actual_dir_entry, stub_dir_entry) - self.storage.directory_entry_get_by_path.assert_called_once_with( + self.storage().directory_entry_get_by_path.assert_called_once_with( b'dir-sha1', [b'some', b'path', b'foo']) @istest def entity_get(self): # given stub_entities = [{'uuid': 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2'}, {'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent': None}] - self.storage.entity_get = MagicMock(return_value=stub_entities) + self.storage().entity_get = MagicMock(return_value=stub_entities) # when actual_entities = backend.entity_get( 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') # then self.assertEquals(actual_entities, stub_entities) - self.storage.entity_get.assert_called_once_with( + self.storage().entity_get.assert_called_once_with( 'e8c3fc2e-a932-4fd7-8f8e-c40645eb35a7') diff --git a/swh/web/ui/tests/test_converters.py b/swh/web/api/tests/test_converters.py similarity index 99% rename from swh/web/ui/tests/test_converters.py rename to swh/web/api/tests/test_converters.py index 840d36f4..537bbc6a 100644 --- a/swh/web/ui/tests/test_converters.py +++ b/swh/web/api/tests/test_converters.py @@ -1,742 +1,743 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import unittest from nose.tools import istest from swh.model import hashutil -from swh.web.ui import converters + +from swh.web.api import converters class ConvertersTestCase(unittest.TestCase): @istest def from_swh(self): some_input = { 'a': 'something', 'b': 'someone', 'c': b'sharp-0.3.4.tgz', 'd': hashutil.hash_to_bytes( 
'b04caf10e9535160d90e874b45aa426de762f19f'), 'e': b'sharp.html/doc_002dS_005fISREG.html', 'g': [b'utf-8-to-decode', b'another-one'], 'h': 'something filtered', 'i': {'e': b'something'}, 'j': { 'k': { 'l': [b'bytes thing', b'another thingy', b''], 'n': 'dont care either' }, 'm': 'dont care' }, 'o': 'something', 'p': b'foo', 'q': {'extra-headers': [['a', b'intact']]}, 'w': None, 'r': {'p': 'also intact', 'q': 'bar'}, 's': { 'timestamp': 42, 'offset': -420, 'negative_utc': None, }, 's1': { 'timestamp': {'seconds': 42, 'microseconds': 0}, 'offset': -420, 'negative_utc': None, }, 's2': datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc), 't': None, 'u': None, 'v': None, 'x': None, } expected_output = { 'a': 'something', 'b': 'someone', 'c': 'sharp-0.3.4.tgz', 'd': 'b04caf10e9535160d90e874b45aa426de762f19f', 'e': 'sharp.html/doc_002dS_005fISREG.html', 'g': ['utf-8-to-decode', 'another-one'], 'i': {'e': 'something'}, 'j': { 'k': { 'l': ['bytes thing', 'another thingy', ''] } }, 'p': 'foo', 'q': {'extra-headers': [['a', 'intact']]}, 'w': {}, 'r': {'p': 'also intact', 'q': 'bar'}, 's': '1969-12-31T17:00:42-07:00', 's1': '1969-12-31T17:00:42-07:00', 's2': '2013-07-01T20:00:00+00:00', 'u': {}, 'v': [], 'x': None, } actual_output = converters.from_swh( some_input, hashess={'d', 'o', 'x'}, bytess={'c', 'e', 'g', 'l'}, dates={'s', 's1', 's2'}, blacklist={'h', 'm', 'n', 'o'}, removables_if_empty={'t'}, empty_dict={'u'}, empty_list={'v'}, convert={'p', 'q', 'w'}, convert_fn=converters.convert_revision_metadata) self.assertEquals(expected_output, actual_output) @istest def from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self): some_input = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } expected_output = { 'a': 'something', 'b': None, 'c': 'someone', 'd': None, } actual_output = converters.from_swh(some_input, hashess={'a', 'b'}, bytess={'c', 'd'}) self.assertEquals(expected_output, actual_output) @istest def 
from_swh_edge_cases_convert_invalid_utf8_bytes(self): some_input = { 'a': 'something', 'b': 'someone', 'c': b'a name \xff', 'd': b'an email \xff', } expected_output = { 'a': 'something', 'b': 'someone', 'c': 'a name \\xff', 'd': 'an email \\xff', 'decoding_failures': ['c', 'd'] } actual_output = converters.from_swh(some_input, hashess={'a', 'b'}, bytess={'c', 'd'}) for v in ['a', 'b', 'c', 'd']: self.assertEqual(expected_output[v], actual_output[v]) self.assertEqual(len(expected_output['decoding_failures']), len(actual_output['decoding_failures'])) for v in expected_output['decoding_failures']: self.assertTrue(v in actual_output['decoding_failures']) @istest def from_swh_empty(self): # when self.assertEquals({}, converters.from_swh({})) @istest def from_swh_none(self): # when self.assertIsNone(converters.from_swh(None)) @istest def from_provenance(self): # given input_provenance = { 'origin': 10, 'visit': 1, 'content': hashutil.hash_to_bytes( '321caf10e9535160d90e874b45aa426de762f19f'), 'revision': hashutil.hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'path': b'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG' } expected_provenance = { 'origin': 10, 'visit': 1, 'content': '321caf10e9535160d90e874b45aa426de762f19f', 'revision': '123caf10e9535160d90e874b45aa426de762f19f', 'path': 'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG' } # when actual_provenance = converters.from_provenance(input_provenance) # then self.assertEqual(actual_provenance, expected_provenance) @istest def from_origin(self): # given origin_input = { 'id': 9, 'type': 'ftp', 'url': 'rsync://ftp.gnu.org/gnu/octave', 'project': None, 'lister': None, } expected_origin = { 'id': 9, 'type': 'ftp', 'url': 'rsync://ftp.gnu.org/gnu/octave', } # when actual_origin = converters.from_origin(origin_input) # then self.assertEqual(actual_origin, expected_origin) @istest def from_release(self): release_input = { 'id': hashutil.hash_to_bytes( 
'aad23fa492a0c5fed0708a6703be875448c86884'), 'target': hashutil.hash_to_bytes( '5e46d564378afc44b31bb89f99d5675195fbdf67'), 'target_type': 'revision', 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'author': { 'name': b'author name', 'fullname': b'Author Name author@email', 'email': b'author@email', }, 'name': b'v0.0.1', 'message': b'some comment on release', 'synthetic': True, } expected_release = { 'id': 'aad23fa492a0c5fed0708a6703be875448c86884', 'target': '5e46d564378afc44b31bb89f99d5675195fbdf67', 'target_type': 'revision', 'date': '2015-01-01T22:00:00+00:00', 'author': { 'name': 'author name', 'fullname': 'Author Name author@email', 'email': 'author@email', }, 'name': 'v0.0.1', 'message': 'some comment on release', 'target_type': 'revision', 'synthetic': True, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_release_no_revision(self): release_input = { 'id': hashutil.hash_to_bytes( 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2016, 3, 2, 10, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.1.1', 'message': b'comment on release', 'synthetic': False, 'author': { 'name': b'bob', 'fullname': b'Bob bob@alice.net', 'email': b'bob@alice.net', }, } expected_release = { 'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e', 'target': None, 'date': '2016-03-02T10:00:00-00:00', 'name': 'v0.1.1', 'message': 'comment on release', 'synthetic': False, 'author': { 'name': 'bob', 'fullname': 'Bob bob@alice.net', 'email': 'bob@alice.net', }, } # when actual_release = converters.from_release(release_input) # then self.assertEqual(actual_release, expected_release) @istest def from_revision(self): revision_input = { 'id': hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5'), 
'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'message': b'synthetic revision message', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'parents': [ hashutil.hash_to_bytes( '29d8be353ed3480476f032475e7c244eff7371d5'), hashutil.hash_to_bytes( '30d8be353ed3480476f032475e7c244eff7371d5') ], 'children': [ hashutil.hash_to_bytes( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 'extra_headers': [['gpgsig', b'some-signature']], 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': 'synthetic revision message', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5', '30d8be353ed3480476f032475e7c244eff7371d5' ], 
'type': 'tar', 'synthetic': True, 'metadata': { 'extra_headers': [['gpgsig', 'some-signature']], 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, 'merge': True } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_revision_nomerge(self): revision_input = { 'id': hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5'), 'parents': [ hashutil.hash_to_bytes( '29d8be353ed3480476f032475e7c244eff7371d5') ] } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5' ], 'merge': False } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_revision_noparents(self): revision_input = { 'id': hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'message': b'synthetic revision message', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'children': [ hashutil.hash_to_bytes( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 
'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': 'synthetic revision message', 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] } } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_revision_invalid(self): revision_input = { 'id': hashutil.hash_to_bytes( '18d8be353ed3480476f032475e7c233eff7371d5'), 'directory': hashutil.hash_to_bytes( '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'), 'author': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'committer': { 'name': b'Software Heritage', 'fullname': b'robot robot@softwareheritage.org', 'email': b'robot@softwareheritage.org', }, 'message': b'invalid message \xff', 'date': { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'committer_date': { 
'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False, }, 'synthetic': True, 'type': 'tar', 'parents': [ hashutil.hash_to_bytes( '29d8be353ed3480476f032475e7c244eff7371d5'), hashutil.hash_to_bytes( '30d8be353ed3480476f032475e7c244eff7371d5') ], 'children': [ hashutil.hash_to_bytes( '123546353ed3480476f032475e7c244eff7371d5'), ], 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912', }] }, } expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'committer': { 'name': 'Software Heritage', 'fullname': 'robot robot@softwareheritage.org', 'email': 'robot@softwareheritage.org', }, 'message': None, 'message_decoding_failed': True, 'date': "2000-01-17T11:23:54+00:00", 'committer_date': "2000-01-17T11:23:54+00:00", 'children': [ '123546353ed3480476f032475e7c244eff7371d5' ], 'parents': [ '29d8be353ed3480476f032475e7c244eff7371d5', '30d8be353ed3480476f032475e7c244eff7371d5' ], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, 'merge': True } # when actual_revision = converters.from_revision(revision_input) # then self.assertEqual(actual_revision, expected_revision) @istest def from_content_none(self): self.assertIsNone(converters.from_content(None)) @istest def from_content(self): content_input = { 'sha1': 
hashutil.hash_to_bytes( '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'), 'sha256': hashutil.hash_to_bytes( '39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'blake2s256': hashutil.hash_to_bytes( '49007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'ctime': 'something-which-is-filtered-out', 'data': b'data in bytes', 'length': 10, 'status': 'hidden', } # 'status' is filtered expected_content = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d274' '7d3bf96c926', 'blake2s256': '49007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2' '747d3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'data': b'data in bytes', 'length': 10, 'status': 'absent', } # when actual_content = converters.from_content(content_input) # then self.assertEqual(actual_content, expected_content) @istest def from_person(self): person_input = { 'id': 10, 'anything': 'else', 'name': b'bob', 'fullname': b'bob bob@alice.net', 'email': b'bob@foo.alice', } expected_person = { 'id': 10, 'anything': 'else', 'name': 'bob', 'fullname': 'bob bob@alice.net', 'email': 'bob@foo.alice', } # when actual_person = converters.from_person(person_input) # then self.assertEqual(actual_person, expected_person) @istest def from_directory_entries(self): dir_entries_input = { 'sha1': hashutil.hash_to_bytes( '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'), 'sha256': hashutil.hash_to_bytes( '39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926'), 'sha1_git': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'target': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'dir_id': hashutil.hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'name': b'bob', 'type': 10, 'status': 'hidden', } expected_dir_entries = { 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'sha256': 
'39007420ca5de7cb3cfc15196335507ee76c98930e7e0afa4d2747' 'd3bf96c926', 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'bob', 'type': 10, 'status': 'absent', } # when actual_dir_entries = converters.from_directory_entry(dir_entries_input) # then self.assertEqual(actual_dir_entries, expected_dir_entries) @istest def from_filetype(self): content_filetype = { 'id': hashutil.hash_to_bytes( '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'), 'encoding': b'utf-8', 'mimetype': b'text/plain', } expected_content_filetype = { 'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5', 'encoding': 'utf-8', 'mimetype': 'text/plain', } # when actual_content_filetype = converters.from_filetype(content_filetype) # then self.assertEqual(actual_content_filetype, expected_content_filetype) diff --git a/swh/web/ui/tests/test_query.py b/swh/web/api/tests/test_query.py similarity index 95% rename from swh/web/ui/tests/test_query.py rename to swh/web/api/tests/test_query.py index 091ffcd8..cebc2b17 100644 --- a/swh/web/ui/tests/test_query.py +++ b/swh/web/api/tests/test_query.py @@ -1,141 +1,142 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import unittest from unittest.mock import patch from nose.tools import istest from swh.model import hashutil -from swh.web.ui import query -from swh.web.ui.exc import BadInputExc + +from swh.web.api import query +from swh.web.api.exc import BadInputExc class QueryTestCase(unittest.TestCase): @istest def parse_hash_malformed_query_with_more_than_2_parts(self): with self.assertRaises(BadInputExc): query.parse_hash('sha1:1234567890987654:other-stuff') @istest def parse_hash_guess_sha1(self): h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15' 
r = query.parse_hash(h) self.assertEquals(r, ('sha1', hashutil.hash_to_bytes(h))) @istest def parse_hash_guess_sha256(self): h = '084C799CD551DD1D8D5C5F9A5D593B2' \ 'E931F5E36122ee5c793c1d08a19839cc0' r = query.parse_hash(h) self.assertEquals(r, ('sha256', hashutil.hash_to_bytes(h))) @istest def parse_hash_guess_algo_malformed_hash(self): with self.assertRaises(BadInputExc): query.parse_hash('1234567890987654') @istest def parse_hash_check_sha1(self): h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15' r = query.parse_hash('sha1:' + h) self.assertEquals(r, ('sha1', hashutil.hash_to_bytes(h))) @istest def parse_hash_check_sha1_git(self): h = 'e1d2d2f924e986ac86fdf7b36c94bcdf32beec15' r = query.parse_hash('sha1_git:' + h) self.assertEquals(r, ('sha1_git', hashutil.hash_to_bytes(h))) @istest def parse_hash_check_sha256(self): h = '084C799CD551DD1D8D5C5F9A5D593B2E931F5E36122ee5c793c1d08a19839cc0' r = query.parse_hash('sha256:' + h) self.assertEquals(r, ('sha256', hashutil.hash_to_bytes(h))) @istest def parse_hash_check_algo_malformed_sha1_hash(self): with self.assertRaises(BadInputExc): query.parse_hash('sha1:1234567890987654') @istest def parse_hash_check_algo_malformed_sha1_git_hash(self): with self.assertRaises(BadInputExc): query.parse_hash('sha1_git:1234567890987654') @istest def parse_hash_check_algo_malformed_sha256_hash(self): with self.assertRaises(BadInputExc): query.parse_hash('sha256:1234567890987654') @istest def parse_hash_check_algo_unknown_one(self): with self.assertRaises(BadInputExc): query.parse_hash('sha2:1234567890987654') - @patch('swh.web.ui.query.parse_hash') + @patch('swh.web.api.query.parse_hash') @istest def parse_hash_with_algorithms_or_throws_bad_query(self, mock_hash): # given mock_hash.side_effect = BadInputExc('Error input') # when with self.assertRaises(BadInputExc) as cm: query.parse_hash_with_algorithms_or_throws( 'sha1:blah', ['sha1'], 'useless error message for this use case') self.assertIn('Error input', cm.exception.args[0]) 
mock_hash.assert_called_once_with('sha1:blah') - @patch('swh.web.ui.query.parse_hash') + @patch('swh.web.api.query.parse_hash') @istest def parse_hash_with_algorithms_or_throws_bad_algo(self, mock_hash): # given mock_hash.return_value = 'sha1', '123' # when with self.assertRaises(BadInputExc) as cm: query.parse_hash_with_algorithms_or_throws( 'sha1:431', ['sha1_git'], 'Only sha1_git!') self.assertIn('Only sha1_git!', cm.exception.args[0]) mock_hash.assert_called_once_with('sha1:431') - @patch('swh.web.ui.query.parse_hash') + @patch('swh.web.api.query.parse_hash') @istest def parse_hash_with_algorithms(self, mock_hash): # given mock_hash.return_value = ('sha256', b'123') # when algo, sha = query.parse_hash_with_algorithms_or_throws( 'sha256:123', ['sha256', 'sha1_git'], 'useless error message for this use case') self.assertEquals(algo, 'sha256') self.assertEquals(sha, b'123') mock_hash.assert_called_once_with('sha256:123') @istest def parse_uuid4(self): # when actual_uuid = query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42cec') # then self.assertEquals(actual_uuid, '7c33636b-8f11-4bda-89d9-ba8b76a42cec') @istest def parse_uuid4_ko(self): # when with self.assertRaises(BadInputExc) as cm: query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42') self.assertIn('badly formed hexadecimal UUID string', cm.exception.args[0]) diff --git a/swh/web/ui/tests/test_service.py b/swh/web/api/tests/test_service.py similarity index 94% rename from swh/web/ui/tests/test_service.py rename to swh/web/api/tests/test_service.py index 672f5b2b..c6bbdabe 100644 --- a/swh/web/ui/tests/test_service.py +++ b/swh/web/api/tests/test_service.py @@ -1,2049 +1,2050 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from nose.tools import istest from unittest.mock import 
MagicMock, patch, call from swh.model.hashutil import hash_to_bytes, hash_to_hex -from swh.web.ui import service -from swh.web.ui.exc import BadInputExc, NotFoundExc -from swh.web.ui.tests import test_app +from .swh_api_testcase import SWHApiTestCase +from swh.web.api import service +from swh.web.api.exc import BadInputExc, NotFoundExc -class ServiceTestCase(test_app.SWHApiTestCase): + +class ServiceTestCase(SWHApiTestCase): def setUp(self): self.SHA1_SAMPLE = '18d8be353ed3480476f032475e7c233eff7371d5' self.SHA1_SAMPLE_BIN = hash_to_bytes(self.SHA1_SAMPLE) self.SHA256_SAMPLE = ('39007420ca5de7cb3cfc15196335507e' 'e76c98930e7e0afa4d2747d3bf96c926') self.SHA256_SAMPLE_BIN = hash_to_bytes(self.SHA256_SAMPLE) self.SHA1GIT_SAMPLE = '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' self.SHA1GIT_SAMPLE_BIN = hash_to_bytes(self.SHA1GIT_SAMPLE) self.DIRECTORY_ID = '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6' self.DIRECTORY_ID_BIN = hash_to_bytes(self.DIRECTORY_ID) self.AUTHOR_ID_BIN = { 'name': b'author', 'email': b'author@company.org', } self.AUTHOR_ID = { 'name': 'author', 'email': 'author@company.org', } self.COMMITTER_ID_BIN = { 'name': b'committer', 'email': b'committer@corp.org', } self.COMMITTER_ID = { 'name': 'committer', 'email': 'committer@corp.org', } self.SAMPLE_DATE_RAW = { 'timestamp': datetime.datetime( 2000, 1, 17, 11, 23, 54, tzinfo=datetime.timezone.utc, ).timestamp(), 'offset': 0, 'negative_utc': False, } self.SAMPLE_DATE = '2000-01-17T11:23:54+00:00' self.SAMPLE_MESSAGE_BIN = b'elegant fix for bug 31415957' self.SAMPLE_MESSAGE = 'elegant fix for bug 31415957' self.SAMPLE_REVISION = { 'id': self.SHA1_SAMPLE, 'directory': self.DIRECTORY_ID, 'author': self.AUTHOR_ID, 'committer': self.COMMITTER_ID, 'message': self.SAMPLE_MESSAGE, 'date': self.SAMPLE_DATE, 'committer_date': self.SAMPLE_DATE, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': {}, 'merge': False } self.SAMPLE_REVISION_RAW = { 'id': self.SHA1_SAMPLE_BIN, 'directory': self.DIRECTORY_ID_BIN, 
'author': self.AUTHOR_ID_BIN, 'committer': self.COMMITTER_ID_BIN, 'message': self.SAMPLE_MESSAGE_BIN, 'date': self.SAMPLE_DATE_RAW, 'committer_date': self.SAMPLE_DATE_RAW, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], } self.SAMPLE_CONTENT = { 'sha1': self.SHA1_SAMPLE, 'sha256': self.SHA256_SAMPLE, 'sha1_git': self.SHA1GIT_SAMPLE, 'length': 190, 'status': 'absent' } self.SAMPLE_CONTENT_RAW = { 'sha1': self.SHA1_SAMPLE_BIN, 'sha256': self.SHA256_SAMPLE_BIN, 'sha1_git': self.SHA1GIT_SAMPLE_BIN, 'length': 190, 'status': 'hidden' } self.date_origin_visit1 = datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc) self.origin_visit1 = { 'date': self.date_origin_visit1, 'origin': 1, 'visit': 1 } - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest - def lookup_multiple_hashes_ball_missing(self, mock_backend): + def test_lookup_multiple_hashes_ball_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': True}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest - def lookup_multiple_hashes_some_missing(self, mock_backend): + def test_lookup_multiple_hashes_some_missing(self, mock_backend): # given mock_backend.content_missing_per_sha1 = MagicMock(return_value=[ hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f') ]) # when actual_lookup = service.lookup_multiple_hashes( [{'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f'}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}]) # then 
self.assertEquals(actual_lookup, [ {'filename': 'a', 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', 'found': False}, {'filename': 'b', 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', 'found': True} ]) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_hash_does_not_exist(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_lookup = service.lookup_hash( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': None, 'algo': 'sha1_git'}, actual_lookup) # check the function has been called with parameters mock_backend.content_find.assert_called_with( 'sha1_git', hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_hash_exist(self, mock_backend): # given stub_content = { 'sha1': hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') } mock_backend.content_find = MagicMock(return_value=stub_content) # when actual_lookup = service.lookup_hash( 'sha1:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': stub_content, 'algo': 'sha1'}, actual_lookup) mock_backend.content_find.assert_called_with( 'sha1', hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f'), ) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def search_hash_does_not_exist(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_lookup = service.search_hash( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': False}, actual_lookup) # check the function has been called with parameters mock_backend.content_find.assert_called_with( 'sha1_git', hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def search_hash_exist(self, mock_backend): # 
given stub_content = { 'sha1': hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') } mock_backend.content_find = MagicMock(return_value=stub_content) # when actual_lookup = service.search_hash( 'sha1:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEquals({'found': True}, actual_lookup) mock_backend.content_find.assert_called_with( 'sha1', hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f'), ) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_ctags(self, mock_backend): # given mock_backend.content_ctags_get = MagicMock( return_value=[{ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'line': 100, 'name': 'hello', 'kind': 'function', 'tool_name': 'ctags', 'tool_version': 'some-version', }]) expected_ctags = [{ 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'line': 100, 'name': 'hello', 'kind': 'function', 'tool_name': 'ctags', 'tool_version': 'some-version', }] # when actual_ctags = list(service.lookup_content_ctags( 'sha1:123caf10e9535160d90e874b45aa426de762f19f')) # then self.assertEqual(actual_ctags, expected_ctags) mock_backend.content_ctags_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_ctags_no_hash(self, mock_backend): # given mock_backend.content_find.return_value = None mock_backend.content_ctags_get = MagicMock( return_value=None) # when actual_ctags = list(service.lookup_content_ctags( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')) # then self.assertEqual(actual_ctags, []) mock_backend.content_find.assert_called_once_with( 'sha1_git', hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_filetype(self, mock_backend): # given mock_backend.content_filetype_get = MagicMock( return_value={ 'id': 
hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'mimetype': b'text/x-c++', 'encoding': b'us-ascii', }) expected_filetype = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'mimetype': 'text/x-c++', 'encoding': 'us-ascii', } # when actual_filetype = service.lookup_content_filetype( 'sha1:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_filetype, expected_filetype) mock_backend.content_filetype_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_filetype_2(self, mock_backend): # given mock_backend.content_find = MagicMock( return_value={ 'sha1': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') } ) mock_backend.content_filetype_get = MagicMock( return_value={ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'mimetype': b'text/x-python', 'encoding': b'us-ascii', } ) expected_filetype = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'mimetype': 'text/x-python', 'encoding': 'us-ascii', } # when actual_filetype = service.lookup_content_filetype( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_filetype, expected_filetype) mock_backend.content_find( 'sha1_git', hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') ) mock_backend.content_filetype_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_language(self, mock_backend): # given mock_backend.content_language_get = MagicMock( return_value={ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'lang': 'python', }) expected_language = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'lang': 'python', } # when actual_language = service.lookup_content_language( 'sha1:123caf10e9535160d90e874b45aa426de762f19f') # then 
self.assertEqual(actual_language, expected_language) mock_backend.content_language_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_language_2(self, mock_backend): # given mock_backend.content_find = MagicMock( return_value={ 'sha1': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') } ) mock_backend.content_language_get = MagicMock( return_value={ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'lang': 'haskell', } ) expected_language = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'lang': 'haskell', } # when actual_language = service.lookup_content_language( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_language, expected_language) mock_backend.content_find( 'sha1_git', hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') ) mock_backend.content_language_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_expression(self, mock_backend): # given mock_backend.content_ctags_search = MagicMock( return_value=[{ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'name': 'foobar', 'kind': 'variable', 'lang': 'C', 'line': 10 }]) expected_ctags = [{ 'sha1': '123caf10e9535160d90e874b45aa426de762f19f', 'name': 'foobar', 'kind': 'variable', 'lang': 'C', 'line': 10 }] # when actual_ctags = list(service.lookup_expression( 'foobar', last_sha1='hash', per_page=10)) # then self.assertEqual(actual_ctags, expected_ctags) mock_backend.content_ctags_search.assert_called_with( 'foobar', 'hash', 10) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_expression_no_result(self, mock_backend): # given mock_backend.content_ctags_search = MagicMock( return_value=[]) expected_ctags = [] # when 
actual_ctags = list(service.lookup_expression( 'barfoo', last_sha1='hash', per_page=10)) # then self.assertEqual(actual_ctags, expected_ctags) mock_backend.content_ctags_search.assert_called_with( 'barfoo', 'hash', 10) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_license(self, mock_backend): # given mock_backend.content_license_get = MagicMock( return_value={ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'lang': 'python', }) expected_license = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'lang': 'python', } # when actual_license = service.lookup_content_license( 'sha1:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_license, expected_license) mock_backend.content_license_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_license_2(self, mock_backend): # given mock_backend.content_find = MagicMock( return_value={ 'sha1': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f') } ) mock_backend.content_license_get = MagicMock( return_value={ 'id': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'lang': 'haskell', } ) expected_license = { 'id': '123caf10e9535160d90e874b45aa426de762f19f', 'lang': 'haskell', } # when actual_license = service.lookup_content_license( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(actual_license, expected_license) mock_backend.content_find( 'sha1_git', hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f') ) mock_backend.content_license_get.assert_called_with( hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_provenance(self, mock_backend): # given mock_backend.content_find_provenance = MagicMock( return_value=(p for p in [{ 
'content': hash_to_bytes( '123caf10e9535160d90e874b45aa426de762f19f'), 'revision': hash_to_bytes( '456caf10e9535160d90e874b45aa426de762f19f'), 'origin': 100, 'visit': 1, 'path': b'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html' }])) expected_provenances = [{ 'content': '123caf10e9535160d90e874b45aa426de762f19f', 'revision': '456caf10e9535160d90e874b45aa426de762f19f', 'origin': 100, 'visit': 1, 'path': 'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html' }] # when actual_provenances = service.lookup_content_provenance( 'sha1_git:123caf10e9535160d90e874b45aa426de762f19f') # then self.assertEqual(list(actual_provenances), expected_provenances) mock_backend.content_find_provenance.assert_called_with( 'sha1_git', hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_provenance_not_found(self, mock_backend): # given mock_backend.content_find_provenance = MagicMock(return_value=None) # when actual_provenances = service.lookup_content_provenance( 'sha1_git:456caf10e9535160d90e874b45aa426de762f19f') # then self.assertIsNone(actual_provenances) mock_backend.content_find_provenance.assert_called_with( 'sha1_git', hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def stat_counters(self, mock_backend): # given input_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_backend.stat_counters = MagicMock(return_value=input_stats) # when actual_stats = service.stat_counters() # then expected_stats = input_stats self.assertEqual(actual_stats, expected_stats) 
mock_backend.stat_counters.assert_called_with() - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_origin_visits(self, mock_backend): # given date_origin_visit2 = datetime.datetime( 2013, 7, 1, 20, 0, 0, tzinfo=datetime.timezone.utc) date_origin_visit3 = datetime.datetime( 2015, 1, 1, 21, 0, 0, tzinfo=datetime.timezone.utc) stub_result = [self.origin_visit1, { 'date': date_origin_visit2, 'origin': 1, 'visit': 2, 'target': hash_to_bytes( '65a55bbdf3629f916219feb3dcc7393ded1bc8db'), 'branch': b'master', 'target_type': 'release', 'metadata': None, }, { 'date': date_origin_visit3, 'origin': 1, 'visit': 3 }] mock_backend.lookup_origin_visits.return_value = stub_result # when expected_origin_visits = [{ 'date': self.origin_visit1['date'].isoformat(), 'origin': self.origin_visit1['origin'], 'visit': self.origin_visit1['visit'] }, { 'date': date_origin_visit2.isoformat(), 'origin': 1, 'visit': 2, 'target': '65a55bbdf3629f916219feb3dcc7393ded1bc8db', 'branch': 'master', 'target_type': 'release', 'metadata': {}, }, { 'date': date_origin_visit3.isoformat(), 'origin': 1, 'visit': 3 }] actual_origin_visits = service.lookup_origin_visits(6) # then self.assertEqual(list(actual_origin_visits), expected_origin_visits) mock_backend.lookup_origin_visits.assert_called_once_with( 6, last_visit=None, limit=10) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_origin_visit(self, mock_backend): # given stub_result = self.origin_visit1 mock_backend.lookup_origin_visit.return_value = stub_result expected_origin_visit = { 'date': self.origin_visit1['date'].isoformat(), 'origin': self.origin_visit1['origin'], 'visit': self.origin_visit1['visit'] } # when actual_origin_visit = service.lookup_origin_visit(1, 1) # then self.assertEqual(actual_origin_visit, expected_origin_visit) mock_backend.lookup_origin_visit.assert_called_once_with(1, 1) - @patch('swh.web.ui.service.backend') + 
@patch('swh.web.api.service.backend') @istest def lookup_origin(self, mock_backend): # given mock_backend.origin_get = MagicMock(return_value={ 'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) # when actual_origin = service.lookup_origin({'id': 'origin-id'}) # then self.assertEqual(actual_origin, {'id': 'origin-id', 'lister': 'uuid-lister', 'project': 'uuid-project', 'url': 'ftp://some/url/to/origin', 'type': 'ftp'}) mock_backend.origin_get.assert_called_with({'id': 'origin-id'}) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_release_ko_id_checksum_not_ok_because_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() with self.assertRaises(BadInputExc) as cm: # when service.lookup_release('not-a-sha1') self.assertIn('invalid checksum', cm.exception.args[0]) mock_backend.release_get.called = False - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_release_ko_id_checksum_ok_but_not_a_sha1(self, mock_backend): # given mock_backend.release_get = MagicMock() # when with self.assertRaises(BadInputExc) as cm: service.lookup_release( '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4daf5' '1aea892abe') self.assertIn('sha1_git supported', cm.exception.args[0]) mock_backend.release_get.called = False - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_directory_with_path_not_found(self, mock_backend): # given mock_backend.lookup_directory_with_path = MagicMock(return_value=None) sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertIsNone(actual_directory) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_directory_with_path_found(self, mock_backend): # given sha1_git = 
'65a55bbdf3629f916219feb3dcc7393ded1bc8db' entry = {'id': 'dir-id', 'type': 'dir', 'name': 'some/path/foo'} mock_backend.lookup_directory_with_path = MagicMock(return_value=entry) # when actual_directory = mock_backend.lookup_directory_with_path( sha1_git, 'some/path/here') self.assertEqual(entry, actual_directory) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_release(self, mock_backend): # given mock_backend.release_get = MagicMock(return_value={ 'id': hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db'), 'target': None, 'date': { 'timestamp': datetime.datetime( 2015, 1, 1, 22, 0, 0, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': True, }, 'name': b'v0.0.1', 'message': b'synthetic release', 'synthetic': True, }) # when actual_release = service.lookup_release( '65a55bbdf3629f916219feb3dcc7393ded1bc8db') # then self.assertEqual(actual_release, { 'id': '65a55bbdf3629f916219feb3dcc7393ded1bc8db', 'target': None, 'date': '2015-01-01T22:00:00-00:00', 'name': 'v0.0.1', 'message': 'synthetic release', 'synthetic': True, }) mock_backend.release_get.assert_called_with( hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db')) @istest def lookup_revision_with_context_ko_not_a_sha1_1(self): # given sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4' \ 'daf51aea892abe' sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Only sha1_git is supported', cm.exception.args[0]) @istest def lookup_revision_with_context_ko_not_a_sha1_2(self): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f6' \ '2d4daf51aea892abe' # when with self.assertRaises(BadInputExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Only sha1_git is supported', 
cm.exception.args[0]) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_with_context_ko_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_bin = hash_to_bytes(sha1_git) mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 777777bdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_called_once_with( sha1_git_bin) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_with_context_ko_root_sha1_git_does_not_exist( self, mock_backend): # given sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db' sha1_git = '777777bdf3629f916219feb3dcc7393ded1bc8db' sha1_git_root_bin = hash_to_bytes(sha1_git_root) sha1_git_bin = hash_to_bytes(sha1_git) mock_backend.revision_get.side_effect = ['foo', None] # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_with_context(sha1_git_root, sha1_git) self.assertIn('Revision 65a55bbdf3629f916219feb3dcc7393ded1bc8db' ' not found', cm.exception.args[0]) mock_backend.revision_get.assert_has_calls([call(sha1_git_bin), call(sha1_git_root_bin)]) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_revision_with_context(self, mock_query, mock_backend): # given sha1_git_root = '666' sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 
'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.side_effect = [ ('sha1', sha1_git_bin), ('sha1', sha1_git_root_bin) ] # lookup revision first 883, then 666 (both exists) mock_backend.revision_get.side_effect = [ sha1_git_dict, sha1_git_root_dict ] mock_backend.revision_log = MagicMock( return_value=stub_revisions) # when actual_revision = service.lookup_revision_with_context( sha1_git_root, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), 'merge': False }) mock_query.parse_hash_with_algorithms_or_throws.assert_has_calls( [call(sha1_git, ['sha1'], 'Only sha1_git is supported.'), call(sha1_git_root, ['sha1'], 'Only sha1_git is supported.')]) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_revision_with_context_sha1_git_root_already_retrieved_as_dict( self, mock_query, mock_backend): # given sha1_git = '883' sha1_git_root_bin = b'666' sha1_git_bin = b'883' sha1_git_root_dict = { 'id': sha1_git_root_bin, 'parents': [b'999'], } sha1_git_dict = { 'id': sha1_git_bin, 'parents': [], 'directory': b'278', } stub_revisions = [ sha1_git_root_dict, { 'id': b'999', 'parents': [b'777', b'883', b'888'], }, { 'id': b'777', 'parents': [b'883'], }, sha1_git_dict, { 'id': b'888', 'parents': [b'889'], }, { 'id': b'889', 'parents': [], }, ] # inputs ok mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', sha1_git_bin) # lookup only on sha1 mock_backend.revision_get.return_value = sha1_git_dict mock_backend.revision_log.return_value = stub_revisions # when actual_revision = service.lookup_revision_with_context( 
{'id': sha1_git_root_bin}, sha1_git) # then self.assertEquals(actual_revision, { 'id': hash_to_hex(sha1_git_bin), 'parents': [], 'children': [hash_to_hex(b'999'), hash_to_hex(b'777')], 'directory': hash_to_hex(b'278'), 'merge': False }) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with( # noqa sha1_git, ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(sha1_git_bin) mock_backend.revision_log.assert_called_with( sha1_git_root_bin, 100) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_ko_revision_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision('123') self.assertIn('Revision 123 not found', cm.exception.args[0]) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_ko_revision_with_path_to_nowhere( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_directory_with_revision( '123', 'path/to/something/unknown') self.assertIn("Directory/File 'path/to/something/unknown' " + "pointed to by revision 123 not found", cm.exception.args[0]) 
mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'path/to/something/unknown') - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_ko_type_not_implemented( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'rev', 'name': b'some/path/to/rev', 'target': b'456' } stub_content = { 'id': b'12', 'type': 'file' } mock_backend.content_get.return_value = stub_content # when with self.assertRaises(NotImplementedError) as cm: service.lookup_directory_with_revision( '123', 'some/path/to/rev') self.assertIn("Entity of type 'rev' not implemented.", cm.exception.args[0]) # then mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/rev') - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_revision_without_path(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'123', 'type': 'dir' }, { 'id': b'456', 'type': 'file' }] 
mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_ls.assert_called_once_with(dir_id) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_dir(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } stub_dir_entries = [{ 'id': b'12', 'type': 'dir' }, { 'id': b'34', 'type': 'file' }] mock_backend.directory_entry_get_by_path.return_value = { 'type': 'dir', 'name': b'some/path', 'target': b'456' } mock_backend.directory_ls.return_value = stub_dir_entries # when actual_directory_entries = service.lookup_directory_with_revision( '123', 'some/path') self.assertEqual(actual_directory_entries['type'], 'dir') self.assertEqual(actual_directory_entries['revision'], '123') self.assertEqual(actual_directory_entries['path'], 'some/path') self.assertEqual(list(actual_directory_entries['content']), stub_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( dir_id, 'some/path') mock_backend.directory_ls.assert_called_once_with(b'456') - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + 
@patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_file_without_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', } mock_backend.content_find.return_value = stub_content # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file') # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': stub_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_with_revision_revision_with_path_to_file_with_data( self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1', b'123') dir_id = b'dir-id-as-sha1' mock_backend.revision_get.return_value = { 'directory': dir_id, } mock_backend.directory_entry_get_by_path.return_value = { 'type': 'file', 'name': b'some/path/to/file', 'target': b'789' } stub_content = { 'status': 'visible', 'sha1': b'content-sha1' } mock_backend.content_find.return_value = stub_content mock_backend.content_get.return_value = { 'sha1': b'content-sha1', 'data': b'some raw data' } expected_content = { 'status': 'visible', 
'sha1': hash_to_hex(b'content-sha1'), 'data': b'some raw data' } # when actual_content = service.lookup_directory_with_revision( '123', 'some/path/to/file', with_data=True) # then self.assertEqual(actual_content, {'type': 'file', 'revision': '123', 'path': 'some/path/to/file', 'content': expected_content}) mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with ('123', ['sha1'], 'Only sha1_git is supported.') mock_backend.revision_get.assert_called_once_with(b'123') mock_backend.directory_entry_get_by_path.assert_called_once_with( b'dir-id-as-sha1', 'some/path/to/file') mock_backend.content_find.assert_called_once_with('sha1_git', b'789') mock_backend.content_get.assert_called_once_with(b'content-sha1') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision(self, mock_backend): # given mock_backend.revision_get = MagicMock( return_value=self.SAMPLE_REVISION_RAW) # when actual_revision = service.lookup_revision( self.SHA1_SAMPLE) # then self.assertEqual(actual_revision, self.SAMPLE_REVISION) mock_backend.revision_get.assert_called_with( self.SHA1_SAMPLE_BIN) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_invalid_msg(self, mock_backend): # given stub_rev = self.SAMPLE_REVISION_RAW stub_rev['message'] = b'elegant fix for bug \xff' expected_revision = self.SAMPLE_REVISION expected_revision['message'] = None expected_revision['message_decoding_failed'] = True mock_backend.revision_get = MagicMock(return_value=stub_rev) # when actual_revision = service.lookup_revision( self.SHA1_SAMPLE) # then self.assertEqual(actual_revision, expected_revision) mock_backend.revision_get.assert_called_with( self.SHA1_SAMPLE_BIN) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_msg_ok(self, mock_backend): # given mock_backend.revision_get.return_value = self.SAMPLE_REVISION_RAW # when rv = 
service.lookup_revision_message( self.SHA1_SAMPLE) # then self.assertEquals(rv, {'message': self.SAMPLE_MESSAGE_BIN}) mock_backend.revision_get.assert_called_with( self.SHA1_SAMPLE_BIN) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_msg_absent(self, mock_backend): # given stub_revision = self.SAMPLE_REVISION_RAW del stub_revision['message'] mock_backend.revision_get.return_value = stub_revision # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_message( self.SHA1_SAMPLE) # then mock_backend.revision_get.assert_called_with( self.SHA1_SAMPLE_BIN) self.assertEqual(cm.exception.args[0], 'No message for revision ' 'with sha1_git ' '18d8be353ed3480476f032475e7c233eff7371d5.') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_msg_norev(self, mock_backend): # given mock_backend.revision_get.return_value = None # when with self.assertRaises(NotFoundExc) as cm: service.lookup_revision_message( self.SHA1_SAMPLE) # then mock_backend.revision_get.assert_called_with( self.SHA1_SAMPLE_BIN) self.assertEqual(cm.exception.args[0], 'Revision with sha1_git ' '18d8be353ed3480476f032475e7c233eff7371d5 ' 'not found.') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_multiple(self, mock_backend): # given sha1 = self.SHA1_SAMPLE sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' stub_revisions = [ self.SAMPLE_REVISION_RAW, { 'id': hash_to_bytes(sha1_other), 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5', 'author': { 'name': b'name', 'email': b'name@surname.org', }, 'committer': { 'name': b'name', 'email': b'name@surname.org', }, 'message': b'ugly fix for bug 42', 'date': { 'timestamp': datetime.datetime( 2000, 1, 12, 5, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False }, 'date_offset': 0, 'committer_date': { 'timestamp': datetime.datetime( 2000, 
1, 12, 5, 23, 54, tzinfo=datetime.timezone.utc).timestamp(), 'offset': 0, 'negative_utc': False }, 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': [], } ] mock_backend.revision_get_multiple.return_value = stub_revisions # when actual_revisions = service.lookup_revision_multiple( [sha1, sha1_other]) # then self.assertEqual(list(actual_revisions), [ self.SAMPLE_REVISION, { 'id': sha1_other, 'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5', 'author': { 'name': 'name', 'email': 'name@surname.org', }, 'committer': { 'name': 'name', 'email': 'name@surname.org', }, 'message': 'ugly fix for bug 42', 'date': '2000-01-12T05:23:54+00:00', 'date_offset': 0, 'committer_date': '2000-01-12T05:23:54+00:00', 'committer_date_offset': 0, 'synthetic': False, 'type': 'git', 'parents': [], 'metadata': {}, 'merge': False } ]) self.assertEqual( list(mock_backend.revision_get_multiple.call_args[0][0]), [hash_to_bytes(sha1), hash_to_bytes(sha1_other)]) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_multiple_none_found(self, mock_backend): # given sha1_bin = self.SHA1_SAMPLE sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' mock_backend.revision_get_multiple.return_value = [] # then actual_revisions = service.lookup_revision_multiple( [sha1_bin, sha1_other]) self.assertEqual(list(actual_revisions), []) self.assertEqual( list(mock_backend.revision_get_multiple.call_args[0][0]), [hash_to_bytes(self.SHA1_SAMPLE), hash_to_bytes(sha1_other)]) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_log(self, mock_backend): # given stub_revision_log = [self.SAMPLE_REVISION_RAW] mock_backend.revision_log = MagicMock(return_value=stub_revision_log) # when actual_revision = service.lookup_revision_log( 'abcdbe353ed3480476f032475e7c233eff7371d5', limit=25) # then self.assertEqual(list(actual_revision), [self.SAMPLE_REVISION]) 
mock_backend.revision_log.assert_called_with( hash_to_bytes('abcdbe353ed3480476f032475e7c233eff7371d5'), 25) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_log_by(self, mock_backend): # given stub_revision_log = [self.SAMPLE_REVISION_RAW] mock_backend.revision_log_by = MagicMock( return_value=stub_revision_log) # when actual_log = service.lookup_revision_log_by( 1, 'refs/heads/master', None, limit=100) # then self.assertEqual(list(actual_log), [self.SAMPLE_REVISION]) mock_backend.revision_log_by.assert_called_with( 1, 'refs/heads/master', None, 100) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_log_by_nolog(self, mock_backend): # given mock_backend.revision_log_by = MagicMock(return_value=None) # when res = service.lookup_revision_log_by( 1, 'refs/heads/master', None, limit=100) # then self.assertEquals(res, None) mock_backend.revision_log_by.assert_called_with( 1, 'refs/heads/master', None, 100) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_raw_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content_raw( 'sha1:18d8be353ed3480476f032475e7c233eff7371d5') # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( 'sha1', hash_to_bytes(self.SHA1_SAMPLE)) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_raw(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value={ 'sha1': self.SHA1_SAMPLE, }) mock_backend.content_get = MagicMock(return_value={ 'data': b'binary data'}) # when actual_content = service.lookup_content_raw( 'sha256:%s' % self.SHA256_SAMPLE) # then self.assertEquals(actual_content, {'data': b'binary data'}) mock_backend.content_find.assert_called_once_with( 'sha256', 
self.SHA256_SAMPLE_BIN) mock_backend.content_get.assert_called_once_with( self.SHA1_SAMPLE) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_not_found(self, mock_backend): # given mock_backend.content_find = MagicMock(return_value=None) # when actual_content = service.lookup_content( 'sha1:%s' % self.SHA1_SAMPLE) # then self.assertIsNone(actual_content) mock_backend.content_find.assert_called_with( 'sha1', self.SHA1_SAMPLE_BIN) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_with_sha1(self, mock_backend): # given mock_backend.content_find = MagicMock( return_value=self.SAMPLE_CONTENT_RAW) # when actual_content = service.lookup_content( 'sha1:%s' % self.SHA1_SAMPLE) # then self.assertEqual(actual_content, self.SAMPLE_CONTENT) mock_backend.content_find.assert_called_with( 'sha1', hash_to_bytes(self.SHA1_SAMPLE)) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_content_with_sha256(self, mock_backend): # given stub_content = self.SAMPLE_CONTENT_RAW stub_content['status'] = 'visible' expected_content = self.SAMPLE_CONTENT expected_content['status'] = 'visible' mock_backend.content_find = MagicMock( return_value=stub_content) # when actual_content = service.lookup_content( 'sha256:%s' % self.SHA256_SAMPLE) # then self.assertEqual(actual_content, expected_content) mock_backend.content_find.assert_called_with( 'sha256', self.SHA256_SAMPLE_BIN) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_person(self, mock_backend): # given mock_backend.person_get = MagicMock(return_value={ 'id': 'person_id', 'name': b'some_name', 'email': b'some-email', }) # when actual_person = service.lookup_person('person_id') # then self.assertEqual(actual_person, { 'id': 'person_id', 'name': 'some_name', 'email': 'some-email', }) mock_backend.person_get.assert_called_with('person_id') - 
@patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_directory_bad_checksum(self, mock_backend): # given mock_backend.directory_ls = MagicMock() # when with self.assertRaises(BadInputExc): service.lookup_directory('directory_id') # then mock_backend.directory_ls.called = False - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory_not_found(self, mock_query, mock_backend): # given mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-id-bin') mock_backend.directory_get.return_value = None # when actual_dir = service.lookup_directory('directory_id') # then self.assertIsNone(actual_dir) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory_id', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_get.assert_called_with('directory-id-bin') mock_backend.directory_ls.called = False - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_directory(self, mock_query, mock_backend): mock_query.parse_hash_with_algorithms_or_throws.return_value = ( 'sha1', 'directory-sha1-bin') # something that exists is all that matters here mock_backend.directory_get.return_value = {'id': b'directory-sha1-bin'} # given stub_dir_entries = [{ 'sha1': self.SHA1_SAMPLE_BIN, 'sha256': self.SHA256_SAMPLE_BIN, 'sha1_git': self.SHA1GIT_SAMPLE_BIN, 'target': hash_to_bytes( '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'), 'dir_id': self.DIRECTORY_ID_BIN, 'name': b'bob', 'type': 10, }] expected_dir_entries = [{ 'sha1': self.SHA1_SAMPLE, 'sha256': self.SHA256_SAMPLE, 'sha1_git': self.SHA1GIT_SAMPLE, 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'dir_id': self.DIRECTORY_ID, 'name': 'bob', 'type': 10, }] mock_backend.directory_ls.return_value = 
stub_dir_entries # when actual_directory_ls = list(service.lookup_directory( 'directory-sha1')) # then self.assertEqual(actual_directory_ls, expected_dir_entries) mock_query.parse_hash_with_algorithms_or_throws.assert_called_with( 'directory-sha1', ['sha1'], 'Only sha1_git is supported.') mock_backend.directory_ls.assert_called_with( 'directory-sha1-bin') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_by_nothing_found(self, mock_backend): # given mock_backend.revision_get_by.return_value = None # when actual_revisions = service.lookup_revision_by(1) # then self.assertIsNone(actual_revisions) mock_backend.revision_get_by(1, 'master', None) - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_by(self, mock_backend): # given stub_rev = self.SAMPLE_REVISION_RAW expected_rev = self.SAMPLE_REVISION mock_backend.revision_get_by.return_value = stub_rev # when actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts') # then self.assertEquals(actual_revision, expected_rev) mock_backend.revision_get_by(1, 'master2', 'some-ts') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_by_nomerge(self, mock_backend): # given stub_rev = self.SAMPLE_REVISION_RAW stub_rev['parents'] = [ hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')] expected_rev = self.SAMPLE_REVISION expected_rev['parents'] = ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'] mock_backend.revision_get_by.return_value = stub_rev # when actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts') # then self.assertEquals(actual_revision, expected_rev) mock_backend.revision_get_by(1, 'master2', 'some-ts') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_by_merge(self, mock_backend): # given stub_rev = self.SAMPLE_REVISION_RAW stub_rev['parents'] = [ 
hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'), hash_to_bytes('ffff3b19e793491b1c6db0fd8b46cd9f32e592fc') ] expected_rev = self.SAMPLE_REVISION expected_rev['parents'] = [ 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', 'ffff3b19e793491b1c6db0fd8b46cd9f32e592fc' ] expected_rev['merge'] = True mock_backend.revision_get_by.return_value = stub_rev # when actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts') # then self.assertEquals(actual_revision, expected_rev) mock_backend.revision_get_by(1, 'master2', 'some-ts') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.backend') @istest def lookup_revision_with_context_by_ko(self, mock_backend): # given mock_backend.revision_get_by.return_value = None # when with self.assertRaises(NotFoundExc) as cm: origin_id = 1 branch_name = 'master3' ts = None service.lookup_revision_with_context_by(origin_id, branch_name, ts, 'sha1') # then self.assertIn( 'Revision with (origin_id: %s, branch_name: %s' ', ts: %s) not found.' 
% (origin_id, branch_name, ts), cm.exception.args[0]) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) - @patch('swh.web.ui.service.lookup_revision_with_context') - @patch('swh.web.ui.service.backend') + @patch('swh.web.api.service.lookup_revision_with_context') + @patch('swh.web.api.service.backend') @istest def lookup_revision_with_context_by(self, mock_backend, mock_lookup_revision_with_context): # given stub_root_rev = {'id': 'root-rev-id'} mock_backend.revision_get_by.return_value = {'id': 'root-rev-id'} stub_rev = {'id': 'rev-found'} mock_lookup_revision_with_context.return_value = stub_rev # when origin_id = 1 branch_name = 'master3' ts = None sha1_git = 'sha1' actual_root_rev, actual_rev = service.lookup_revision_with_context_by( origin_id, branch_name, ts, sha1_git) # then self.assertEquals(actual_root_rev, stub_root_rev) self.assertEquals(actual_rev, stub_rev) mock_backend.revision_get_by.assert_called_once_with( origin_id, branch_name, ts) mock_lookup_revision_with_context.assert_called_once_with( stub_root_rev, sha1_git, 100) - @patch('swh.web.ui.service.backend') - @patch('swh.web.ui.service.query') + @patch('swh.web.api.service.backend') + @patch('swh.web.api.service.query') @istest def lookup_entity_by_uuid(self, mock_query, mock_backend): # given uuid_test = 'correct-uuid' mock_query.parse_uuid4.return_value = uuid_test stub_entities = [{'uuid': uuid_test}] mock_backend.entity_get.return_value = stub_entities # when actual_entities = list(service.lookup_entity_by_uuid(uuid_test)) # then self.assertEquals(actual_entities, stub_entities) mock_query.parse_uuid4.assert_called_once_with(uuid_test) mock_backend.entity_get.assert_called_once_with(uuid_test) @istest def lookup_revision_through_ko_not_implemented(self): # then with self.assertRaises(NotImplementedError): service.lookup_revision_through({ 'something-unknown': 10, }) - @patch('swh.web.ui.service.lookup_revision_with_context_by') + 
@patch('swh.web.api.service.lookup_revision_with_context_by') @istest def lookup_revision_through_with_context_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 1, 'branch_name': 'master', 'ts': None, 'sha1_git': 'sha1-git' }, limit=1000) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 1, 'master', None, 'sha1-git', 1000) - @patch('swh.web.ui.service.lookup_revision_by') + @patch('swh.web.api.service.lookup_revision_by') @istest def lookup_revision_through_with_revision_by(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'origin_id': 2, 'branch_name': 'master2', 'ts': 'some-ts', }, limit=10) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 2, 'master2', 'some-ts') - @patch('swh.web.ui.service.lookup_revision_with_context') + @patch('swh.web.api.service.lookup_revision_with_context') @istest def lookup_revision_through_with_context(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git_root': 'some-sha1-root', 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1-root', 'some-sha1', 100) - @patch('swh.web.ui.service.lookup_revision') + @patch('swh.web.api.service.lookup_revision') @istest def lookup_revision_through_with_revision(self, mock_lookup): # given stub_rev = {'id': 'rev'} mock_lookup.return_value = stub_rev # when actual_revision = service.lookup_revision_through({ 'sha1_git': 'some-sha1', }) # then self.assertEquals(actual_revision, stub_rev) mock_lookup.assert_called_once_with( 'some-sha1') - @patch('swh.web.ui.service.lookup_revision_through') + 
@patch('swh.web.api.service.lookup_revision_through') @istest def lookup_directory_through_revision_ko_not_found( self, mock_lookup_rev): # given mock_lookup_rev.return_value = None # when with self.assertRaises(NotFoundExc): service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) - @patch('swh.web.ui.service.lookup_revision_through') - @patch('swh.web.ui.service.lookup_directory_with_revision') + @patch('swh.web.api.service.lookup_revision_through') + @patch('swh.web.api.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_data( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} mock_lookup_dir.return_value = {'type': 'dir', 'content': []} # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 100) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, {'type': 'dir', 'content': []}) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100) mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', False) - @patch('swh.web.ui.service.lookup_revision_through') - @patch('swh.web.ui.service.lookup_directory_with_revision') + @patch('swh.web.api.service.lookup_revision_through') + @patch('swh.web.api.service.lookup_directory_with_revision') @istest def lookup_directory_through_revision_ok_with_content( self, mock_lookup_dir, mock_lookup_rev): # given mock_lookup_rev.return_value = {'id': 'rev-id'} stub_result = {'type': 'file', 'revision': 'rev-id', 'content': {'data': b'blah', 'sha1': 'sha1'}} mock_lookup_dir.return_value = stub_result # when rev_id, dir_result = service.lookup_directory_through_revision( {'id': 'rev'}, 'some/path', 10, with_data=True) # then self.assertEquals(rev_id, 'rev-id') self.assertEquals(dir_result, stub_result) mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 10) 
mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', True) diff --git a/swh/web/api/tests/test_templatetags.py b/swh/web/api/tests/test_templatetags.py new file mode 100644 index 00000000..8773d6ef --- /dev/null +++ b/swh/web/api/tests/test_templatetags.py @@ -0,0 +1,66 @@ +# Copyright (C) 2015-2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU Affero General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import unittest + +from nose.tools import istest + +from swh.web.api.templatetags import api_extras + + +class SWHApiTemplateTagsTest(unittest.TestCase): + @istest + def urlize_api_links_api(self): + # update api link with html links content with links + content = '{"url": "/api/1/abc/"}' + expected_content = ('{"url": "/api/1/abc/"}') + + self.assertEquals(api_extras.urlize_api_links(content), + expected_content) + + @istest + def urlize_api_links_browse(self): + # update /browse link with html links content with links + content = '{"url": "/browse/def/"}' + expected_content = ('{"url": "' + '/browse/def/"}') + self.assertEquals(api_extras.urlize_api_links(content), + expected_content) + + @istest + def urlize_header_links(self): + # update api link with html links content with links + content = """; rel="next" +; rel="prev" +""" + expected_content = """</api/1/abc/>; rel="next" +</api/1/def/>; rel="prev" +""" + + self.assertEquals(api_extras.urlize_header_links(content), + expected_content) + + @istest + def safe_docstring_display(self): + # update api link with html links content with links + docstring = """This is my list header: + + - Here is item 1, with a continuation + line right here + - Here is item 2 + + Here is something that is not part of the list""" + + expected_docstring = """

This is my list header:

+
    +
  • Here is item 1, with a continuation +line right here
  • +
  • Here is item 2
  • +
+

Here is something that is not part of the list

+""" + + self.assertEquals(api_extras.safe_docstring_display(docstring), + expected_docstring) diff --git a/swh/web/ui/tests/test_utils.py b/swh/web/api/tests/test_utils.py similarity index 67% rename from swh/web/ui/tests/test_utils.py rename to swh/web/api/tests/test_utils.py index 103eea97..3f9ed246 100644 --- a/swh/web/ui/tests/test_utils.py +++ b/swh/web/api/tests/test_utils.py @@ -1,978 +1,941 @@ # Copyright (C) 2015 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import dateutil import unittest from unittest.mock import patch, call from nose.tools import istest, nottest -from swh.web.ui import utils +from swh.web.api import utils class UtilsTestCase(unittest.TestCase): def setUp(self): + self.maxDiff = None self.url_map = [dict(rule='/other/', methods=set(['GET', 'POST', 'HEAD']), endpoint='foo'), dict(rule='/some/old/url/', methods=set(['GET', 'POST']), endpoint='blablafn'), dict(rule='/other/old/url/', methods=set(['GET', 'HEAD']), endpoint='bar'), dict(rule='/other', methods=set([]), endpoint=None), dict(rule='/other2', methods=set([]), endpoint=None)] @istest def filter_endpoints_1(self): # when actual_data = utils.filter_endpoints(self.url_map, '/some') # then self.assertEquals(actual_data, { '/some/old/url/': { 'methods': ['GET', 'POST'], 'endpoint': 'blablafn' } }) @istest def filter_endpoints_2(self): # when actual_data = utils.filter_endpoints(self.url_map, '/other', blacklist=['/other2']) # then # rules /other is skipped because its' exactly the prefix url # rules /other2 is skipped because it's blacklisted self.assertEquals(actual_data, { '/other/': { 'methods': ['GET', 'HEAD', 'POST'], 'endpoint': 'foo' }, '/other/old/url/': { 'methods': ['GET', 'HEAD'], 'endpoint': 'bar' } }) @istest def prepare_data_for_view_default_encoding(self): self.maxDiff = 
None # given inputs = [ { 'data': b'some blah data' }, { 'data': 1, 'data_url': '/api/1/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' }] # when actual_result = utils.prepare_data_for_view(inputs) # then self.assertEquals(actual_result, [ { 'data': 'some blah data', }, { 'data': 1, 'data_url': '/browse/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' } ]) @istest def prepare_data_for_view(self): self.maxDiff = None # given inputs = [ { 'data': b'some blah data' }, { 'data': 1, 'data_url': '/api/1/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' }] # when actual_result = utils.prepare_data_for_view(inputs, encoding='ascii') # then self.assertEquals(actual_result, [ { 'data': 'some blah data', }, { 'data': 1, 'data_url': '/browse/some/api/call', }, { 'blah': 'foobar', 'blah_url': '/some/non/changed/api/call' } ]) @istest def prepare_data_for_view_ko_cannot_decode(self): self.maxDiff = None # given inputs = { 'data': 'hé dude!'.encode('utf8'), } actual_result = utils.prepare_data_for_view(inputs, encoding='ascii') # then self.assertEquals(actual_result, { 'data': "Cannot decode the data bytes, try and set another " "encoding in the url (e.g. 
?encoding=utf8) or " "download directly the " "content's raw data.", }) @istest def filter_field_keys_dict_unknown_keys(self): # when actual_res = utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory1', 'file2'}) # then self.assertEqual(actual_res, {}) @istest def filter_field_keys_dict(self): # when actual_res = utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory', 'link'}) # then self.assertEqual(actual_res, {'directory': 1, 'link': 3}) @istest def filter_field_keys_list_unknown_keys(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'1': 1, '2': 2, 'link': 3}], {'d'}) # then self.assertEqual(actual_res, [{}, {}]) @istest def filter_field_keys_map(self): # when actual_res = utils.filter_field_keys( map(lambda x: {'i': x['i']+1, 'j': x['j']}, [{'i': 1, 'j': None}, {'i': 2, 'j': None}, {'i': 3, 'j': None}]), {'i'}) # then self.assertEqual(list(actual_res), [{'i': 2}, {'i': 3}, {'i': 4}]) @istest def filter_field_keys_list(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'dir': 1, 'fil': 2, 'lin': 3}], {'directory', 'dir'}) # then self.assertEqual(actual_res, [{'directory': 1}, {'dir': 1}]) @istest def filter_field_keys_other(self): # given input_set = {1, 2} # when actual_res = utils.filter_field_keys(input_set, {'a', '1'}) # then self.assertEqual(actual_res, input_set) @istest def fmap(self): self.assertEquals([2, 3, None, 4], utils.fmap(lambda x: x+1, [1, 2, None, 3])) self.assertEquals([11, 12, 13], list(utils.fmap(lambda x: x+10, map(lambda x: x, [1, 2, 3])))) self.assertEquals({'a': 2, 'b': 4}, utils.fmap(lambda x: x*2, {'a': 1, 'b': 2})) self.assertEquals(100, utils.fmap(lambda x: x*10, 10)) self.assertEquals({'a': [2, 6], 'b': 4}, utils.fmap(lambda x: x*2, {'a': [1, 3], 'b': 2})) self.assertIsNone(utils.fmap(lambda x: x, None)) @istest def person_to_string(self): self.assertEqual(utils.person_to_string(dict(name='raboof', 
email='foo@bar')), 'raboof ') @istest def parse_timestamp(self): input_timestamps = [ None, '2016-01-12', '2016-01-12T09:19:12+0100', 'Today is January 1, 2047 at 8:21:00AM', '1452591542', ] output_dates = [ None, datetime.datetime(2016, 1, 12, 0, 0), datetime.datetime(2016, 1, 12, 9, 19, 12, tzinfo=dateutil.tz.tzoffset(None, 3600)), datetime.datetime(2047, 1, 1, 8, 21), datetime.datetime(2016, 1, 12, 9, 39, 2, tzinfo=datetime.timezone.utc), ] for ts, exp_date in zip(input_timestamps, output_dates): self.assertEquals(utils.parse_timestamp(ts), exp_date) @istest def enrich_release_0(self): # when actual_release = utils.enrich_release({}) # then self.assertEqual(actual_release, {}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_release_1(self, mock_flask): + def enrich_release_1(self, mock_django_reverse): # given - def url_for_test_context(url, **kwargs): - if url == 'api_content_metadata': + def reverse_test_context(view_name, kwargs): + if view_name == 'content': id = kwargs['q'] return '/api/1/content/%s/' % id - elif url == 'api_person': + elif view_name == 'person': id = kwargs['person_id'] return '/api/1/person/%s/' % id else: raise ValueError( 'This should not happened so fail if it does.') - mock_flask.url_for.side_effect = url_for_test_context + mock_django_reverse.side_effect = reverse_test_context # when actual_release = utils.enrich_release({ 'target': '123', 'target_type': 'content', 'author': { 'id': 100, 'name': 'author release name', 'email': 'author@email', }, }) # then self.assertEqual(actual_release, { 'target': '123', 'target_type': 'content', 'target_url': '/api/1/content/sha1_git:123/', 'author_url': '/api/1/person/100/', 'author': { 'id': 100, 'name': 'author release name', 'email': 'author@email', }, }) - mock_flask.url_for.assert_has_calls([ - call('api_content_metadata', q='sha1_git:123'), - call('api_person', person_id=100) + mock_django_reverse.assert_has_calls([ + call('content', 
kwargs={'q': 'sha1_git:123'}), + call('person', kwargs={'person_id': 100}) ]) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_release_2(self, mock_flask): + def enrich_release_2(self, mock_django_reverse): # given - mock_flask.url_for.return_value = '/api/1/dir/23/' + mock_django_reverse.return_value = '/api/1/dir/23/' # when actual_release = utils.enrich_release({'target': '23', 'target_type': 'directory'}) # then self.assertEqual(actual_release, { 'target': '23', 'target_type': 'directory', 'target_url': '/api/1/dir/23/' }) - mock_flask.url_for.assert_called_once_with('api_directory', - q='23') + mock_django_reverse.assert_called_once_with('directory', + kwargs={'sha1_git': '23'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_release_3(self, mock_flask): + def enrich_release_3(self, mock_django_reverse): # given - mock_flask.url_for.return_value = '/api/1/rev/3/' + mock_django_reverse.return_value = '/api/1/rev/3/' # when actual_release = utils.enrich_release({'target': '3', 'target_type': 'revision'}) # then self.assertEqual(actual_release, { 'target': '3', 'target_type': 'revision', 'target_url': '/api/1/rev/3/' }) - mock_flask.url_for.assert_called_once_with('api_revision', - sha1_git='3') + mock_django_reverse.assert_called_once_with('revision', + kwargs={'sha1_git': '3'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_release_4(self, mock_flask): + def enrich_release_4(self, mock_django_reverse): # given - mock_flask.url_for.return_value = '/api/1/rev/4/' + mock_django_reverse.return_value = '/api/1/rev/4/' # when actual_release = utils.enrich_release({'target': '4', 'target_type': 'release'}) # then self.assertEqual(actual_release, { 'target': '4', 'target_type': 'release', 'target_url': '/api/1/rev/4/' }) - mock_flask.url_for.assert_called_once_with('api_release', - sha1_git='4') + 
mock_django_reverse.assert_called_once_with('release', + kwargs={'sha1_git': '4'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_directory_no_type(self, mock_flask): + def enrich_directory_no_type(self, mock_django_reverse): # when/then self.assertEqual(utils.enrich_directory({'id': 'dir-id'}), {'id': 'dir-id'}) # given - mock_flask.url_for.return_value = '/api/content/sha1_git:123/' + mock_django_reverse.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'target': '123', }) # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'target': '123', 'target_url': '/api/content/sha1_git:123/', }) - mock_flask.url_for.assert_called_once_with('api_content_metadata', - q='sha1_git:123') + mock_django_reverse.assert_called_once_with( + 'content', kwargs={'q': 'sha1_git:123'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_directory_with_context_and_type_file(self, mock_flask): + def enrich_directory_with_context_and_type_file(self, mock_django_reverse): # given - mock_flask.url_for.return_value = '/api/content/sha1_git:123/' + mock_django_reverse.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', }, context_url='/api/revision/revsha1/directory/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', 'target_url': '/api/content/sha1_git:123/', 'file_url': '/api/revision/revsha1/directory' '/prefix/path/hy/' }) - mock_flask.url_for.assert_called_once_with('api_content_metadata', - q='sha1_git:789') + mock_django_reverse.assert_called_once_with( + 'content', kwargs={'q': 'sha1_git:789'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def 
enrich_directory_with_context_and_type_dir(self, mock_flask): + def enrich_directory_with_context_and_type_dir(self, mock_django_reverse): # given - mock_flask.url_for.return_value = '/api/directory/456/' + mock_django_reverse.return_value = '/api/directory/456/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'dir', 'name': 'emacs-42', 'target_type': 'file', 'target': '456', }, context_url='/api/revision/origin/2/directory/some/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'dir', 'target_type': 'file', 'name': 'emacs-42', 'target': '456', 'target_url': '/api/directory/456/', 'dir_url': '/api/revision/origin/2/directory' '/some/prefix/path/emacs-42/' }) - mock_flask.url_for.assert_called_once_with('api_directory', - sha1_git='456') + mock_django_reverse.assert_called_once_with('directory', + kwargs={'sha1_git': '456'}) @istest def enrich_content_without_hashes(self): # when/then self.assertEqual(utils.enrich_content({'id': '123'}), {'id': '123'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_content_with_hashes(self, mock_flask): + def enrich_content_with_hashes(self, mock_django_reverse): for h in ['sha1', 'sha256', 'sha1_git']: # given - mock_flask.url_for.side_effect = [ + mock_django_reverse.side_effect = [ '/api/content/%s:123/raw/' % h, '/api/filetype/%s:123/' % h, '/api/language/%s:123/' % h, '/api/license/%s:123/' % h, ] # when enriched_content = utils.enrich_content( { 'id': '123', h: 'blahblah' } ) # then self.assertEqual( enriched_content, { 'id': '123', h: 'blahblah', 'data_url': '/api/content/%s:123/raw/' % h, 'filetype_url': '/api/filetype/%s:123/' % h, 'language_url': '/api/language/%s:123/' % h, 'license_url': '/api/license/%s:123/' % h, } ) - mock_flask.url_for.assert_has_calls([ - call('api_content_raw', q='%s:blahblah' % h), - call('api_content_filetype', q='%s:blahblah' % h), - call('api_content_language', q='%s:blahblah' % h), - 
call('api_content_license', q='%s:blahblah' % h), + mock_django_reverse.assert_has_calls([ + call('content-raw', kwargs={'q': '%s:blahblah' % h}), + call('content-filetype', kwargs={'q': '%s:blahblah' % h}), + call('content-language', kwargs={'q': '%s:blahblah' % h}), + call('content-license', kwargs={'q': '%s:blahblah' % h}), ]) - mock_flask.reset() + mock_django_reverse.reset() - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_content_with_hashes_and_top_level_url(self, mock_flask): + def enrich_content_with_hashes_and_top_level_url(self, + mock_django_reverse): for h in ['sha1', 'sha256', 'sha1_git']: # given - mock_flask.url_for.side_effect = [ + mock_django_reverse.side_effect = [ '/api/content/%s:123/' % h, '/api/content/%s:123/raw/' % h, '/api/filetype/%s:123/' % h, '/api/language/%s:123/' % h, '/api/license/%s:123/' % h, ] # when enriched_content = utils.enrich_content( { 'id': '123', h: 'blahblah' }, top_url=True ) # then self.assertEqual( enriched_content, { 'id': '123', h: 'blahblah', 'content_url': '/api/content/%s:123/' % h, 'data_url': '/api/content/%s:123/raw/' % h, 'filetype_url': '/api/filetype/%s:123/' % h, 'language_url': '/api/language/%s:123/' % h, 'license_url': '/api/license/%s:123/' % h, } ) - mock_flask.url_for.assert_has_calls([ - call('api_content_metadata', q='%s:blahblah' % h), - call('api_content_raw', q='%s:blahblah' % h), - call('api_content_filetype', q='%s:blahblah' % h), - call('api_content_language', q='%s:blahblah' % h), - call('api_content_license', q='%s:blahblah' % h), + mock_django_reverse.assert_has_calls([ + call('content', kwargs={'q': '%s:blahblah' % h}), + call('content-raw', kwargs={'q': '%s:blahblah' % h}), + call('content-filetype', kwargs={'q': '%s:blahblah' % h}), + call('content-language', kwargs={'q': '%s:blahblah' % h}), + call('content-license', kwargs={'q': '%s:blahblah' % h}), ]) - mock_flask.reset() + mock_django_reverse.reset() @istest def 
enrich_entity_identity(self): # when/then self.assertEqual(utils.enrich_content({'id': '123'}), {'id': '123'}) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_entity_with_sha1(self, mock_flask): + def enrich_entity_with_sha1(self, mock_django_reverse): # given - def url_for_test(fn, **entity): - return '/api/entity/' + entity['uuid'] + '/' + def reverse_test(view_name, kwargs): + return '/api/entity/' + kwargs['uuid'] + '/' - mock_flask.url_for.side_effect = url_for_test + mock_django_reverse.side_effect = reverse_test # when actual_entity = utils.enrich_entity({ 'uuid': 'uuid-1', 'parent': 'uuid-parent', 'name': 'something' }) # then self.assertEqual(actual_entity, { 'uuid': 'uuid-1', 'uuid_url': '/api/entity/uuid-1/', 'parent': 'uuid-parent', 'parent_url': '/api/entity/uuid-parent/', 'name': 'something', }) - mock_flask.url_for.assert_has_calls([call('api_entity_by_uuid', - uuid='uuid-1'), - call('api_entity_by_uuid', - uuid='uuid-parent')]) + mock_django_reverse.assert_has_calls( + [call('entity', kwargs={'uuid': 'uuid-1'}), + call('entity', kwargs={'uuid': 'uuid-parent'})]) @nottest - def _url_for_context_test(self, fn, **data): - if fn == 'api_revision': - if 'context' in data and data['context'] is not None: - return '/api/revision/%s/prev/%s/' % (data['sha1_git'], data['context']) # noqa - else: - return '/api/revision/%s/' % data['sha1_git'] - elif fn == 'api_revision_log': - if 'prev_sha1s' in data: - return '/api/revision/%s/prev/%s/log/' % (data['sha1_git'], data['prev_sha1s']) # noqa + def _reverse_context_test(self, view_name, kwargs): + if view_name == 'revision': + return '/api/revision/%s/' % kwargs['sha1_git'] + elif view_name == 'revision-context': + return '/api/revision/%s/prev/%s/' % (kwargs['sha1_git'], kwargs['context']) # noqa + elif view_name == 'revision-log': + if 'prev_sha1s' in kwargs: + return '/api/revision/%s/prev/%s/log/' % (kwargs['sha1_git'], kwargs['prev_sha1s']) # noqa else: - return 
'/api/revision/%s/log/' % data['sha1_git'] + return '/api/revision/%s/log/' % kwargs['sha1_git'] - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_revision_without_children_or_parent(self, mock_flask): + def enrich_revision_without_children_or_parent(self, mock_django_reverse): # given - def url_for_test(fn, **data): - if fn == 'api_revision': - return '/api/revision/' + data['sha1_git'] + '/' - elif fn == 'api_revision_log': - return '/api/revision/' + data['sha1_git'] + '/log/' - elif fn == 'api_directory': - return '/api/directory/' + data['sha1_git'] + '/' - elif fn == 'api_person': - return '/api/person/' + data['person_id'] + '/' - - mock_flask.url_for.side_effect = url_for_test + def reverse_test(view_name, kwargs): + if view_name == 'revision': + return '/api/revision/' + kwargs['sha1_git'] + '/' + elif view_name == 'revision-log': + return '/api/revision/' + kwargs['sha1_git'] + '/log/' + elif view_name == 'directory': + return '/api/directory/' + kwargs['sha1_git'] + '/' + elif view_name == 'person': + return '/api/person/' + kwargs['person_id'] + '/' + + mock_django_reverse.side_effect = reverse_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'directory': '123', 'author': {'id': '1'}, 'committer': {'id': '2'}, }) expected_revision = { 'id': 'rev-id', 'directory': '123', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'directory_url': '/api/directory/123/', 'author': {'id': '1'}, 'author_url': '/api/person/1/', 'committer': {'id': '2'}, 'committer_url': '/api/person/2/' } # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_person', - person_id='1'), - call('api_person', - person_id='2'), - call('api_directory', - sha1_git='123')]) + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 
'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('person', kwargs={'person_id': '1'}), + call('person', kwargs={'person_id': '2'}), + call('directory', kwargs={'sha1_git': '123'})]) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest def enrich_revision_with_children_and_parent_no_dir(self, - mock_flask): + mock_django_reverse): # given - mock_flask.url_for.side_effect = self._url_for_context_test + mock_django_reverse.side_effect = self._reverse_context_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456'], }, context='prev-rev') expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'history_context_url': '/api/revision/rev-id/prev/prev-rev/log/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/', '/api/revision/prev-rev/'], } # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='prev-rev'), - call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id', - prev_sha1s='prev-rev'), - call('api_revision', - sha1_git='123'), - call('api_revision', - sha1_git='456')]) - - @patch('swh.web.ui.utils.flask') - @istest - def enrich_revision_no_context(self, mock_flask): + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 'prev-rev'}), + call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id', + 'prev_sha1s': 'prev-rev'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'})]) + + @patch('swh.web.api.utils.reverse') + @istest + def enrich_revision_no_context(self, mock_django_reverse): # given - 
mock_flask.url_for.side_effect = self._url_for_context_test + mock_django_reverse.side_effect = self._reverse_context_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456'], }) expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/'] } # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision', - sha1_git='123'), - call('api_revision', - sha1_git='456')]) + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'})]) - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_revision_context_empty_prev_list(self, mock_flask): + def enrich_revision_context_empty_prev_list(self, mock_django_reverse): # given - mock_flask.url_for.side_effect = self._url_for_context_test + mock_django_reverse.side_effect = self._reverse_context_test # when expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'history_context_url': ('/api/revision/rev-id/' 'prev/prev-rev/log/'), 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], - 'children_urls': ['/api/revision/456/', '/api/revision/prev-rev/'], + 'children_urls': ['/api/revision/456/', + '/api/revision/prev-rev/'], } actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'parents': ['123'], 'children': ['456']}, context='prev-rev') # then self.assertEqual(actual_revision, expected_revision) - 
mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='prev-rev'), - call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id', - prev_sha1s='prev-rev'), - call('api_revision', - sha1_git='123'), - call('api_revision', - sha1_git='456')]) - - @patch('swh.web.ui.utils.flask') - @istest - def enrich_revision_context_some_prev_list(self, mock_flask): + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 'prev-rev'}), + call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id', + 'prev_sha1s': 'prev-rev'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'})]) + + @patch('swh.web.api.utils.reverse') + @istest + def enrich_revision_context_some_prev_list(self, mock_django_reverse): # given - mock_flask.url_for.side_effect = self._url_for_context_test + mock_django_reverse.side_effect = self._reverse_context_test # when expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'history_context_url': ('/api/revision/rev-id/' 'prev/prev1-rev/prev0-rev/log/'), 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/', '/api/revision/prev0-rev/prev/prev1-rev/'], } actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456']}, context='prev1-rev/prev0-rev') # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='prev0-rev', - context='prev1-rev'), - call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id', - prev_sha1s='prev1-rev/prev0-rev'), - call('api_revision', - sha1_git='123'), - call('api_revision', - 
sha1_git='456')]) + mock_django_reverse.assert_has_calls( + [call('revision-context', kwargs={'context': 'prev1-rev', + 'sha1_git': 'prev0-rev'}), + call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'prev_sha1s': 'prev1-rev/prev0-rev', + 'sha1_git': 'rev-id'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'})]) @nottest - def _url_for_rev_message_test(self, fn, **data): - if fn == 'api_revision': - if 'context' in data and data['context'] is not None: - return '/api/revision/%s/prev/%s/' % (data['sha1_git'], data['context']) # noqa - else: - return '/api/revision/%s/' % data['sha1_git'] - elif fn == 'api_revision_log': - if 'prev_sha1s' in data and data['prev_sha1s'] is not None: - return '/api/revision/%s/prev/%s/log/' % (data['sha1_git'], data['prev_sha1s']) # noqa + def _reverse_rev_message_test(self, view_name, kwargs): + if view_name == 'revision': + return '/api/revision/%s/' % kwargs['sha1_git'] + elif view_name == 'revision-log': + if 'prev_sha1s' in kwargs and kwargs['prev_sha1s'] is not None: + return '/api/revision/%s/prev/%s/log/' % (kwargs['sha1_git'], kwargs['prev_sha1s']) # noqa else: - return '/api/revision/%s/log/' % data['sha1_git'] - elif fn == 'api_revision_raw_message': - return '/api/revision/' + data['sha1_git'] + '/raw/' + return '/api/revision/%s/log/' % kwargs['sha1_git'] + elif view_name == 'revision-raw-message': + return '/api/revision/' + kwargs['sha1_git'] + '/raw/' else: - return '/api/revision/' + data['sha1_git_root'] + '/history/' + data['sha1_git'] + '/' # noqa + return '/api/revision/%s/prev/%s/' % (kwargs['sha1_git'], kwargs['context']) # noqa - @patch('swh.web.ui.utils.flask') + @patch('swh.web.api.utils.reverse') @istest - def enrich_revision_with_no_message(self, mock_flask): + def enrich_revision_with_no_message(self, mock_django_reverse): # given - mock_flask.url_for.side_effect = 
self._url_for_rev_message_test + mock_django_reverse.side_effect = self._reverse_rev_message_test # when expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'history_context_url': ('/api/revision/rev-id/' 'prev/prev-rev/log/'), 'message': None, 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], - 'children_urls': ['/api/revision/456/', '/api/revision/prev-rev/'], + 'children_urls': ['/api/revision/456/', + '/api/revision/prev-rev/'], } actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'message': None, 'parents': ['123'], 'children': ['456'], }, context='prev-rev') # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='prev-rev'), - call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id', - prev_sha1s='prev-rev'), - call('api_revision', - sha1_git='123'), - call('api_revision', - sha1_git='456')]) - - @patch('swh.web.ui.utils.flask') - @istest - def enrich_revision_with_invalid_message(self, mock_flask): + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 'prev-rev'}), + call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id', + 'prev_sha1s': 'prev-rev'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'})] + ) + + @patch('swh.web.api.utils.reverse') + @istest + def enrich_revision_with_invalid_message(self, mock_django_reverse): # given - mock_flask.url_for.side_effect = self._url_for_rev_message_test + mock_django_reverse.side_effect = self._reverse_rev_message_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'message': None, 'message_decoding_failed': True, 'parents': ['123'], 'children': ['456'], }, 
context='prev-rev') expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'history_context_url': ('/api/revision/rev-id/' 'prev/prev-rev/log/'), 'message': None, 'message_decoding_failed': True, 'message_url': '/api/revision/rev-id/raw/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], - 'children_urls': ['/api/revision/456/', '/api/revision/prev-rev/'], + 'children_urls': ['/api/revision/456/', + '/api/revision/prev-rev/'], } # then self.assertEqual(actual_revision, expected_revision) - mock_flask.url_for.assert_has_calls( - [call('api_revision', - sha1_git='prev-rev'), - call('api_revision', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id'), - call('api_revision_log', - sha1_git='rev-id', - prev_sha1s='prev-rev'), - call('api_revision', - sha1_git='123'), - call('api_revision', - sha1_git='456')]) + mock_django_reverse.assert_has_calls( + [call('revision', kwargs={'sha1_git': 'prev-rev'}), + call('revision', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id'}), + call('revision-log', kwargs={'sha1_git': 'rev-id', + 'prev_sha1s': 'prev-rev'}), + call('revision', kwargs={'sha1_git': '123'}), + call('revision', kwargs={'sha1_git': '456'}), + call('revision-raw-message', kwargs={'sha1_git': 'rev-id'})]) @istest def shorten_path_noop(self): noops = [ '/api/', '/browse/', '/content/symbol/foobar/' ] for noop in noops: self.assertEqual( utils.shorten_path(noop), noop ) @istest def shorten_path_sha1(self): sha1 = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6' short_sha1 = sha1[:8] + '...' 
templates = [ '/api/1/content/sha1:%s/', '/api/1/content/sha1_git:%s/', '/api/1/directory/%s/', '/api/1/content/sha1:%s/ctags/', ] for template in templates: self.assertEqual( utils.shorten_path(template % sha1), template % short_sha1 ) @istest def shorten_path_sha256(self): sha256 = ('aafb16d69fd30ff58afdd69036a26047' '213add102934013a014dfca031c41aef') short_sha256 = sha256[:8] + '...' templates = [ '/api/1/content/sha256:%s/', '/api/1/directory/%s/', '/api/1/content/sha256:%s/filetype/', ] for template in templates: self.assertEqual( utils.shorten_path(template % sha256), template % short_sha256 ) diff --git a/swh/web/ui/tests/views/test_api.py b/swh/web/api/tests/test_views.py similarity index 72% rename from swh/web/ui/tests/views/test_api.py rename to swh/web/api/tests/test_views.py index 26695bcf..6b08204a 100644 --- a/swh/web/ui/tests/views/test_api.py +++ b/swh/web/api/tests/test_views.py @@ -1,2498 +1,2394 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information -import json import unittest -import yaml from nose.tools import istest from unittest.mock import patch, MagicMock -from swh.web.ui.tests import test_app -from swh.web.ui import exc -from swh.web.ui.views import api -from swh.web.ui.exc import NotFoundExc, BadInputExc from swh.storage.exc import StorageDBError, StorageAPIError +from .swh_api_testcase import SWHApiTestCase +from swh.web.api.exc import NotFoundExc, BadInputExc +from swh.web.api import views -class ApiTestCase(test_app.SWHApiTestCase): + +class ApiTestCase(SWHApiTestCase): def setUp(self): self.origin_visit1 = { 'date': 1104616800.0, 'origin': 10, 'visit': 100, 'metadata': None, 'status': 'full', } self.origin1 = { 'id': 1234, 'lister': 'uuid-lister-0', 'project': 'uuid-project-0', 'url': 'ftp://some/url/to/origin/0', 'type': 'ftp' } 
@istest def generic_api_lookup_nothing_is_found(self): # given def test_generic_lookup_fn(sha1, another_unused_arg): assert another_unused_arg == 'unused_arg' assert sha1 == 'sha1' return None # when with self.assertRaises(NotFoundExc) as cm: - api._api_lookup( + views._api_lookup( test_generic_lookup_fn, 'sha1', 'unused_arg', notfound_msg='This will be raised because None is returned.') self.assertIn('This will be raised because None is returned.', cm.exception.args[0]) @istest def generic_api_map_are_enriched_and_transformed_to_list(self): # given def test_generic_lookup_fn_1(criteria0, param0, param1): assert criteria0 == 'something' return map(lambda x: x + 1, [1, 2, 3]) # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( test_generic_lookup_fn_1, 'something', 'some param 0', 'some param 1', notfound_msg=('This is not the error message you are looking for. ' 'Move along.'), enrich_fn=lambda x: x * 2) self.assertEqual(actual_result, [4, 6, 8]) @istest def generic_api_list_are_enriched_too(self): # given def test_generic_lookup_fn_2(crit): assert crit == 'something' return ['a', 'b', 'c'] # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( test_generic_lookup_fn_2, 'something', notfound_msg=('Not the error message you are looking for, it is. ' 'Along, you move!'), enrich_fn=lambda x: ''. 
join(['=', x, '='])) self.assertEqual(actual_result, ['=a=', '=b=', '=c=']) @istest def generic_api_generator_are_enriched_and_returned_as_list(self): # given def test_generic_lookup_fn_3(crit): assert crit == 'crit' return (i for i in [4, 5, 6]) # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( test_generic_lookup_fn_3, 'crit', notfound_msg='Move!', enrich_fn=lambda x: x - 1) self.assertEqual(actual_result, [3, 4, 5]) @istest def generic_api_simple_data_are_enriched_and_returned_too(self): # given def test_generic_lookup_fn_4(crit): assert crit == '123' return {'a': 10} def test_enrich_data(x): x['a'] = x['a'] * 10 return x # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( test_generic_lookup_fn_4, '123', notfound_msg='Nothing to do', enrich_fn=test_enrich_data) self.assertEqual(actual_result, {'a': 100}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_filetype(self, mock_service): stub_filetype = { - 'mimetype': 'application/xml', + 'accepted_media_type': 'application/xml', 'encoding': 'ascii', 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', } mock_service.lookup_content_filetype.return_value = stub_filetype # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/' 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f/filetype/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { - 'mimetype': 'application/xml', + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { + 'accepted_media_type': 'application/xml', 'encoding': 'ascii', 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'content_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', }) mock_service.lookup_content_filetype.assert_called_once_with( 
'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_filetype_sha_not_found(self, mock_service): # given mock_service.lookup_content_filetype.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/' 'filetype/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No filetype information found for content ' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03.' }) mock_service.lookup_content_filetype.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_language(self, mock_service): stub_language = { 'lang': 'lisp', 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', } mock_service.lookup_content_language.return_value = stub_language # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/' 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f/language/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'lang': 'lisp', 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'content_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', }) mock_service.lookup_content_language.assert_called_once_with( 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def 
api_content_language_sha_not_found(self, mock_service): # given mock_service.lookup_content_language.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/language/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No language information found for content ' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03.' }) mock_service.lookup_content_language.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_symbol(self, mock_service): stub_ctag = [{ 'sha1': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'foobar', 'kind': 'Haskell', 'line': 10, }] mock_service.lookup_expression.return_value = stub_ctag # when - rv = self.app.get('/api/1/content/symbol/foo/?last_sha1=sha1') + rv = self.client.get('/api/1/content/symbol/foo/?last_sha1=sha1') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, [{ + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, [{ 'sha1': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'foobar', 'kind': 'Haskell', 'line': 10, 'content_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', 'data_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/raw/', 'license_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/license/', 'language_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/language/', 'filetype_url': '/api/1/content/' 
'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/filetype/', }]) - actual_headers = dict(rv.headers) - self.assertFalse('Link' in actual_headers) + self.assertFalse('Link' in rv) mock_service.lookup_expression.assert_called_once_with( 'foo', 'sha1', 10) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_symbol_2(self, mock_service): stub_ctag = [{ 'sha1': '12371b8614fcd89ccd17ca2b1d9e66c5b00a6456', 'name': 'foobar', 'kind': 'Haskell', 'line': 10, }, { 'sha1': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6678', 'name': 'foo', 'kind': 'Lisp', 'line': 10, }] mock_service.lookup_expression.return_value = stub_ctag # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/symbol/foo/?last_sha1=prev-sha1&per_page=2') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_ctag) - actual_headers = dict(rv.headers) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, stub_ctag) self.assertTrue( - actual_headers['Link'] == '; rel="next"' or # noqa - actual_headers['Link'] == '; rel="next"' # noqa + rv['Link'] == '; rel="next"' or # noqa + rv['Link'] == '; rel="next"' # noqa ) mock_service.lookup_expression.assert_called_once_with( 'foo', 'prev-sha1', 2) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') # @istest def api_content_symbol_3(self, mock_service): stub_ctag = [{ 'sha1': '67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'foo', 'kind': 'variable', 'line': 100, }] mock_service.lookup_expression.return_value = stub_ctag # when - rv = self.app.get('/api/1/content/symbol/foo/') + rv = self.client.get('/api/1/content/symbol/foo/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, [{ + 
self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, [{ 'sha1': '67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'name': 'foo', 'kind': 'variable', 'line': 100, 'content_url': '/api/1/content/' 'sha1:67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', 'data_url': '/api/1/content/' 'sha1:67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03/raw/', 'license_url': '/api/1/content/' 'sha1:67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03/license/', 'language_url': '/api/1/content/' 'sha1:67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03/language/', 'filetype_url': '/api/1/content/' 'sha1:67891b8614fcd89ccd17ca2b1d9e66c5b00a6d03/filetype/', }]) - actual_headers = dict(rv.headers) - self.assertEquals( - actual_headers['Link'], '') + self.assertFalse(rv.has_header('Link')) mock_service.lookup_expression.assert_called_once_with('foo', None, 10) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_symbol_not_found(self, mock_service): # given mock_service.lookup_expression.return_value = [] # when - rv = self.app.get('/api/1/content/symbol/bar/?last_sha1=hash') + rv = self.client.get('/api/1/content/symbol/bar/?last_sha1=hash') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No indexed raw content match expression \'bar\'.' 
}) - actual_headers = dict(rv.headers) - self.assertFalse('Link' in actual_headers) + self.assertFalse('Link' in rv) mock_service.lookup_expression.assert_called_once_with( 'bar', 'hash', 10) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_ctags(self, mock_service): stub_ctags = { 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'ctags': [] } mock_service.lookup_content_ctags.return_value = stub_ctags # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/' 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f/ctags/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'ctags': [], 'content_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', }) mock_service.lookup_content_ctags.assert_called_once_with( 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_license(self, mock_service): stub_license = { 'licenses': ['No_license_found', 'Apache-2.0'], 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'tool_name': 'nomos', } mock_service.lookup_content_license.return_value = stub_license # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/' 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f/license/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'licenses': ['No_license_found', 'Apache-2.0'], 'id': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'tool_name': 
'nomos', 'content_url': '/api/1/content/' 'sha1:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', }) mock_service.lookup_content_license.assert_called_once_with( 'sha1_git:b04caf10e9535160d90e874b45aa426de762f19f') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_license_sha_not_found(self, mock_service): # given mock_service.lookup_content_license.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/' 'license/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No license information found for content ' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03.' }) mock_service.lookup_content_license.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_provenance(self, mock_service): stub_provenances = [{ 'origin': 1, 'visit': 2, 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'content': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'path': 'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html' }] mock_service.lookup_content_provenance.return_value = stub_provenances # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/' 'sha1_git:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/provenance/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, [{ + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, [{ 'origin': 1, 'visit': 2, 'origin_url': '/api/1/origin/1/', 'origin_visits_url': 
'/api/1/origin/1/visits/', 'origin_visit_url': '/api/1/origin/1/visit/2/', 'revision': 'b04caf10e9535160d90e874b45aa426de762f19f', 'revision_url': '/api/1/revision/' 'b04caf10e9535160d90e874b45aa426de762f19f/', 'content': '34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'content_url': '/api/1/content/' 'sha1_git:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03/', 'path': 'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html' }]) mock_service.lookup_content_provenance.assert_called_once_with( 'sha1_git:34571b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_provenance_sha_not_found(self, mock_service): # given mock_service.lookup_content_provenance.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/' 'provenance/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 not found.' 
}) mock_service.lookup_content_provenance.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_metadata(self, mock_service): # given mock_service.lookup_content.return_value = { 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560' 'cde9b067a4f', 'length': 17, 'status': 'visible' } # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'data_url': '/api/1/content/' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/raw/', 'filetype_url': '/api/1/content/' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/filetype/', 'language_url': '/api/1/content/' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/language/', 'license_url': '/api/1/content/' 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03/license/', 'sha1': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03', 'sha1_git': 'b4e8f472ffcb01a03875b26e462eb568739f6882', 'sha256': '83c0e67cc80f60caf1fcbec2d84b0ccd7968b3be4735637006560c' 'de9b067a4f', 'length': 17, 'status': 'visible' }) mock_service.lookup_content.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_not_found_as_json(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_content_provenance = MagicMock() # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/') 
self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' }) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_content_provenance.called = False - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_not_found_as_yaml(self, mock_service): # given mock_service.lookup_content.return_value = None mock_service.lookup_content_provenance = MagicMock() # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c/', - headers={'accept': 'application/yaml'}) + HTTP_ACCEPT='application/yaml') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/yaml') + self.assertTrue('application/yaml' in rv['Content-Type']) - response_data = yaml.load(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha256:83c0e67cc80f60caf1fcbec2d84b0ccd79' '68b3be4735637006560c not found.' 
}) mock_service.lookup_content.assert_called_once_with( 'sha256:83c0e67cc80f60caf1fcbec2d84b0ccd7968b3' 'be4735637006560c') mock_service.lookup_content_provenance.called = False - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_raw_ko_not_found(self, mock_service): # given mock_service.lookup_content_raw.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content sha1:40e71b8614fcd89ccd17ca2b1d9e6' '6c5b00a6d03 is not found.' }) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_raw_text(self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content mock_service.lookup_content_filetype.return_value = { 'mimetype': 'text/html' } # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' - '/raw/', - headers={'Content-type': 'application/octet-stream', - 'Content-disposition': 'attachment'}) + '/raw/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/octet-stream') - headers = dict(rv.headers) + self.assertEquals(rv['Content-Type'], 'application/octet-stream') self.assertEquals( - headers['Content-disposition'], - 'attachment;filename=content_sha1_' + rv['Content-disposition'], + 'attachment; filename=content_sha1_' '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03_raw') self.assertEquals( - 
headers['Content-Type'], 'application/octet-stream') - self.assertEquals(rv.data, stub_content['data']) + rv['Content-Type'], 'application/octet-stream') + self.assertEquals(rv.content, stub_content['data']) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_content_filetype.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_content_raw_text_with_filename(self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content mock_service.lookup_content_filetype.return_value = { 'mimetype': 'text/html' } # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' - '/raw/?filename=filename.txt', - headers={'Content-type': 'application/octet-stream', - 'Content-disposition': 'attachment'}) + '/raw/?filename=filename.txt') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/octet-stream') - headers = dict(rv.headers) + self.assertEquals(rv['Content-Type'], 'application/octet-stream') self.assertEquals( - headers['Content-disposition'], - 'attachment;filename=filename.txt') + rv['Content-disposition'], + 'attachment; filename=filename.txt') self.assertEquals( - headers['Content-Type'], 'application/octet-stream') - self.assertEquals(rv.data, stub_content['data']) + rv['Content-Type'], 'application/octet-stream') + self.assertEquals(rv.content, stub_content['data']) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_content_filetype.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest - def 
api_content_raw_no_mimetype_text_is_not_available_for_download( + def api_content_raw_no_accepted_media_type_text_is_not_available_for_download( # noqa self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content mock_service.lookup_content_filetype.return_value = { 'mimetype': 'application/octet-stream' } # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/') self.assertEquals(rv.status_code, 403) - self.assertEquals(rv.mimetype, 'application/json') - data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'ForbiddenExc', 'reason': 'Only textual content is available for download. ' 'Actual content mimetype is application/octet-stream.' }) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_content_filetype.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest - def api_content_raw_no_mimetype_found_so_not_available_for_download( + def api_content_raw_no_accepted_media_type_found_so_not_available_for_download( # noqa self, mock_service): # given stub_content = {'data': b'some content data'} mock_service.lookup_content_raw.return_value = stub_content mock_service.lookup_content_filetype.return_value = None # when - rv = self.app.get( + rv = self.client.get( '/api/1/content/sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03' '/raw/') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content 
sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03 ' 'is not available for download.' }) mock_service.lookup_content_raw.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') mock_service.lookup_content_filetype.assert_called_once_with( 'sha1:40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_check_content_known(self, mock_service): # given mock_service.lookup_multiple_hashes.return_value = [ {'found': True, 'filename': None, 'sha1': 'sha1:blah'} ] expected_result = { 'search_stats': {'nbfiles': 1, 'pct': 100}, 'search_res': [{'sha1': 'sha1:blah', 'found': True}] } # when - rv = self.app.get('/api/1/content/known/sha1:blah/') + rv = self.client.get('/api/1/content/known/sha1:blah/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_result) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_result) mock_service.lookup_multiple_hashes.assert_called_once_with( [{'filename': None, 'sha1': 'sha1:blah'}]) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_check_content_known_as_yaml(self, mock_service): # given mock_service.lookup_multiple_hashes.return_value = [ {'found': True, 'filename': None, 'sha1': 'sha1:halb'}, {'found': False, 'filename': None, 'sha1': 'sha1_git:hello'} ] expected_result = { 'search_stats': {'nbfiles': 2, 'pct': 50}, 'search_res': [{'sha1': 'sha1:halb', 'found': True}, {'sha1': 'sha1_git:hello', 'found': False}] } # when - rv = self.app.get('/api/1/content/known/sha1:halb,sha1_git:hello/', - headers={'Accept': 'application/yaml'}) + rv = self.client.get('/api/1/content/known/sha1:halb,sha1_git:hello/', + HTTP_ACCEPT='application/yaml') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 
'application/yaml') - - response_data = yaml.load(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_result) + self.assertTrue('application/yaml' in rv['Content-Type']) + self.assertEquals(rv.data, expected_result) mock_service.lookup_multiple_hashes.assert_called_once_with( [{'filename': None, 'sha1': 'sha1:halb'}, {'filename': None, 'sha1': 'sha1_git:hello'}]) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_check_content_known_post_as_yaml(self, mock_service): # given stub_result = [{'sha1': '7e62b1fe10c88a3eddbba930b156bee2956b2435', 'found': True}, {'filename': 'filepath', 'sha1': '8e62b1fe10c88a3eddbba930b156bee2956b2435', 'found': True}, {'filename': 'filename', 'sha1': '64025b5d1520c615061842a6ce6a456cad962a3f', 'found': False}] mock_service.lookup_multiple_hashes.return_value = stub_result expected_result = { 'search_stats': {'nbfiles': 3, 'pct': 2/3 * 100}, 'search_res': stub_result } # when - rv = self.app.post( - '/api/1/content/known/', - headers={'Accept': 'application/yaml'}, + rv = self.client.post( + '/api/1/content/known/search/', data=dict( q='7e62b1fe10c88a3eddbba930b156bee2956b2435', filepath='8e62b1fe10c88a3eddbba930b156bee2956b2435', - filename='64025b5d1520c615061842a6ce6a456cad962a3f') + filename='64025b5d1520c615061842a6ce6a456cad962a3f'), + HTTP_ACCEPT='application/yaml' ) self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/yaml') - - response_data = yaml.load(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_result) + self.assertTrue('application/yaml' in rv['Content-Type']) + self.assertEquals(rv.data, expected_result) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_check_content_known_not_found(self, mock_service): # given stub_result = [{'sha1': 'sha1:halb', 'found': False}] mock_service.lookup_multiple_hashes.return_value = stub_result expected_result = { 'search_stats': 
{'nbfiles': 1, 'pct': 0.0}, 'search_res': stub_result } # when - rv = self.app.get('/api/1/content/known/sha1:halb/') + rv = self.client.get('/api/1/content/known/sha1:halb/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_result) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_result) mock_service.lookup_multiple_hashes.assert_called_once_with( [{'filename': None, 'sha1': 'sha1:halb'}]) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_stat_counters_raise_error(self, mock_service): # given mock_service.stat_counters.side_effect = ValueError( 'voluntary error to check the bad request middleware.') # when - rv = self.app.get('/api/1/stat/counters/') + rv = self.client.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 400) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'ValueError', 'reason': 'voluntary error to check the bad request middleware.'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_stat_counters_raise_swh_storage_error_db(self, mock_service): # given mock_service.stat_counters.side_effect = StorageDBError( 'SWH Storage exploded! 
Will be back online shortly!') # when - rv = self.app.get('/api/1/stat/counters/') + rv = self.client.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'StorageDBError', 'reason': 'An unexpected error occurred in the backend: ' 'SWH Storage exploded! Will be back online shortly!'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_stat_counters_raise_swh_storage_error_api(self, mock_service): # given mock_service.stat_counters.side_effect = StorageAPIError( 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' ) # when - rv = self.app.get('/api/1/stat/counters/') + rv = self.client.get('/api/1/stat/counters/') # then self.assertEquals(rv.status_code, 503) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'StorageAPIError', 'reason': 'An unexpected error occurred in the api backend: ' 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' 
}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_stat_counters(self, mock_service): # given stub_stats = { "content": 1770830, "directory": 211683, "directory_entry_dir": 209167, "directory_entry_file": 1807094, "directory_entry_rev": 0, "entity": 0, "entity_history": 0, "occurrence": 0, "occurrence_history": 19600, "origin": 1096, "person": 0, "release": 8584, "revision": 7792, "revision_history": 0, "skipped_content": 0 } mock_service.stat_counters.return_value = stub_stats # when - rv = self.app.get('/api/1/stat/counters/') + rv = self.client.get('/api/1/stat/counters/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_stats) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, stub_stats) mock_service.stat_counters.assert_called_once_with() - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visits_raise_error(self, mock_service): # given mock_service.lookup_origin_visits.side_effect = ValueError( 'voluntary error to check the bad request middleware.') # when - rv = self.app.get('/api/1/origin/2/visits/') + rv = self.client.get('/api/1/origin/2/visits/') # then self.assertEquals(rv.status_code, 400) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'ValueError', 'reason': 'voluntary error to check the bad request middleware.'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visits_raise_swh_storage_error_db( self, mock_service): # given mock_service.lookup_origin_visits.side_effect = StorageDBError( 'SWH Storage exploded! 
Will be back online shortly!') # when - rv = self.app.get('/api/1/origin/2/visits/') + rv = self.client.get('/api/1/origin/2/visits/') # then self.assertEquals(rv.status_code, 503) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'StorageDBError', 'reason': 'An unexpected error occurred in the backend: ' 'SWH Storage exploded! Will be back online shortly!'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visits_raise_swh_storage_error_api( self, mock_service): # given mock_service.lookup_origin_visits.side_effect = StorageAPIError( 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' ) # when - rv = self.app.get('/api/1/origin/2/visits/') + rv = self.client.get('/api/1/origin/2/visits/') # then self.assertEquals(rv.status_code, 503) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'StorageAPIError', 'reason': 'An unexpected error occurred in the api backend: ' 'SWH Storage API dropped dead! Will resurrect from its ashes asap!' 
}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visits(self, mock_service): # given stub_visits = [ { 'date': 1293919200.0, - 'origin': 1, + 'origin': 2, 'visit': 2 }, { 'date': 1420149600.0, - 'origin': 1, + 'origin': 2, 'visit': 3 } ] mock_service.lookup_origin_visits.return_value = stub_visits # when - rv = self.app.get('/api/1/origin/2/visits/?per_page=2&last_visit=1') + rv = self.client.get('/api/1/origin/2/visits/?per_page=2&last_visit=1') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, [ + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, [ { 'date': 1293919200.0, - 'origin': 1, + 'origin': 2, 'visit': 2, - 'origin_visit_url': '/api/1/origin/1/visit/2/', + 'origin_visit_url': '/api/1/origin/2/visit/2/', }, { 'date': 1420149600.0, - 'origin': 1, + 'origin': 2, 'visit': 3, - 'origin_visit_url': '/api/1/origin/1/visit/3/', + 'origin_visit_url': '/api/1/origin/2/visit/3/', } ]) mock_service.lookup_origin_visits.assert_called_once_with( - 2, last_visit=1, per_page=2) + '2', last_visit=1, per_page=2) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visit(self, mock_service): # given origin_visit = self.origin_visit1.copy() origin_visit.update({ 'occurrences': { 'master': { 'target_type': 'revision', - 'target': 'revision-id', + 'target': '98564', } } }) mock_service.lookup_origin_visit.return_value = origin_visit expected_origin_visit = self.origin_visit1.copy() expected_origin_visit.update({ 'origin_url': '/api/1/origin/10/', 'occurrences': { 'master': { 'target_type': 'revision', - 'target': 'revision-id', - 'target_url': '/api/1/revision/revision-id/' + 'target': '98564', + 'target_url': '/api/1/revision/98564/' } } }) # when - rv = 
self.app.get('/api/1/origin/10/visit/100/') + rv = self.client.get('/api/1/origin/10/visit/100/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_origin_visit) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_origin_visit) - mock_service.lookup_origin_visit.assert_called_once_with(10, 100) + mock_service.lookup_origin_visit.assert_called_once_with('10', '100') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_1_lookup_origin_visit_not_found(self, mock_service): # given mock_service.lookup_origin_visit.return_value = None # when - rv = self.app.get('/api/1/origin/1/visit/1000/') + rv = self.client.get('/api/1/origin/1/visit/1000/') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No visit 1000 for origin 1 found' }) - mock_service.lookup_origin_visit.assert_called_once_with(1, 1000) + mock_service.lookup_origin_visit.assert_called_once_with('1', '1000') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_origin_by_id(self, mock_service): # given mock_service.lookup_origin.return_value = self.origin1 expected_origin = self.origin1.copy() expected_origin.update({ 'origin_visits_url': '/api/1/origin/1234/visits/' }) # when - rv = self.app.get('/api/1/origin/1234/') + rv = self.client.get('/api/1/origin/1234/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_origin) - response_data = 
json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_origin) + mock_service.lookup_origin.assert_called_with({'id': '1234'}) - mock_service.lookup_origin.assert_called_with({'id': 1234}) - - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_origin_by_type_url(self, mock_service): # given stub_origin = self.origin1.copy() stub_origin.update({ 'id': 987 }) mock_service.lookup_origin.return_value = stub_origin expected_origin = stub_origin.copy() expected_origin.update({ 'origin_visits_url': '/api/1/origin/987/visits/' }) # when - rv = self.app.get('/api/1/origin/ftp/url/ftp://some/url/to/origin/0/') + rv = self.client.get('/api/1/origin/ftp/url' + '/ftp://some/url/to/origin/0/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_origin) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_origin) mock_service.lookup_origin.assert_called_with( {'url': 'ftp://some/url/to/origin/0/', 'type': 'ftp'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_origin_not_found(self, mock_service): # given mock_service.lookup_origin.return_value = None # when - rv = self.app.get('/api/1/origin/4321/') + rv = self.client.get('/api/1/origin/4321/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin with id 4321 not found.' 
}) - mock_service.lookup_origin.assert_called_with({'id': 4321}) + mock_service.lookup_origin.assert_called_with({'id': '4321'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_release(self, mock_service): + release_id = '7045404f3d1c54e6473' + target_id = '6072557b6c10cd9a211' # given stub_release = { - 'id': 'release-0', + 'id': release_id, 'target_type': 'revision', - 'target': 'revision-sha1', + 'target': target_id, "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'id': 10, 'name': 'author release name', 'email': 'author@email', }, } expected_release = { - 'id': 'release-0', + 'id': release_id, 'target_type': 'revision', - 'target': 'revision-sha1', - 'target_url': '/api/1/revision/revision-sha1/', + 'target': target_id, + 'target_url': '/api/1/revision/%s/' % target_id, "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author_url': '/api/1/person/10/', 'author': { 'id': 10, 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when - rv = self.app.get('/api/1/release/release-0/') + rv = self.client.get('/api/1/release/%s/' % release_id) # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_release) - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_release) + mock_service.lookup_release.assert_called_once_with(release_id) - mock_service.lookup_release.assert_called_once_with('release-0') - - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_release_target_type_not_a_revision(self, mock_service): + release = '8d56a78' + target = '9a5c3f' # given stub_release = { - 'id': 'release-0', + 'id': release, 'target_type': 'other-stuff', - 'target': 'other-stuff-checksum', + 'target': target, 
"date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author': { 'id': 9, 'name': 'author release name', 'email': 'author@email', }, } expected_release = { - 'id': 'release-0', + 'id': release, 'target_type': 'other-stuff', - 'target': 'other-stuff-checksum', + 'target': target, "date": "Mon, 10 Mar 1997 08:00:00 GMT", "synthetic": True, 'author_url': '/api/1/person/9/', 'author': { 'id': 9, 'name': 'author release name', 'email': 'author@email', }, } mock_service.lookup_release.return_value = stub_release # when - rv = self.app.get('/api/1/release/release-0/') + rv = self.client.get('/api/1/release/%s/' % release) # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_release) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_release) - mock_service.lookup_release.assert_called_once_with('release-0') + mock_service.lookup_release.assert_called_once_with(release) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_release_not_found(self, mock_service): # given mock_service.lookup_release.return_value = None # when - rv = self.app.get('/api/1/release/release-0/') + rv = self.client.get('/api/1/release/c54e6473c71bbb716529/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', - 'reason': 'Release with sha1_git release-0 not found.' + 'reason': 'Release with sha1_git c54e6473c71bbb716529 not found.' 
}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision(self, mock_service): # given stub_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['8734ef7e7c357ce2af928115c6c6a42b7e2a44e7'], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } mock_service.lookup_revision.return_value = stub_revision expected_revision = { 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233e' 'ff7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6' 'a42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [{ 'id': '8734ef7e7c357ce2af928115c6c6a42b7e2a44e7', 'url': '/api/1/revision/8734ef7e7c357ce2af928115c6c6a42b7e2a44e7/' # noqa }], 'type': 'tar', 'synthetic': True, 'metadata': { 'original_artifact': [{ 'archive_type': 'tar', 'name': 'webbase-5.7.0.tar.gz', 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd', 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1', 'sha256': 
'401d0df797110bea805d358b85bcc1ced29549d3d73f' '309d36484e7edf7bb912' }] }, } # when - rv = self.app.get('/api/1/revision/' - '18d8be353ed3480476f032475e7c233eff7371d5/') + rv = self.client.get('/api/1/revision/' + '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(expected_revision, response_data) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(expected_revision, rv.data) mock_service.lookup_revision.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_not_found(self, mock_service): # given mock_service.lookup_revision.return_value = None # when - rv = self.app.get('/api/1/revision/revision-0/') + rv = self.client.get('/api/1/revision/12345/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', - 'reason': 'Revision with sha1_git revision-0 not found.'}) + 'reason': 'Revision with sha1_git 12345 not found.'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_raw_ok(self, mock_service): # given stub_revision = {'message': 'synthetic revision message'} mock_service.lookup_revision_message.return_value = stub_revision # when - rv = self.app.get('/api/1/revision/18d8be353ed3480476f032475e7c2' - '33eff7371d5/raw/') + rv = self.client.get('/api/1/revision/18d8be353ed3480476f032475e7c2' + '33eff7371d5/raw/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/octet-stream') - self.assertEquals(rv.data, 
b'synthetic revision message') + self.assertEquals(rv['Content-Type'], 'application/octet-stream') + self.assertEquals(rv.content, b'synthetic revision message') mock_service.lookup_revision_message.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_raw_ok_no_msg(self, mock_service): # given mock_service.lookup_revision_message.side_effect = NotFoundExc( 'No message for revision') # when - rv = self.app.get('/api/1/revision/' - '18d8be353ed3480476f032475e7c233eff7371d5/raw/') + rv = self.client.get('/api/1/revision/' + '18d8be353ed3480476f032475e7c233eff7371d5/raw/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No message for revision'}) self.assertEquals mock_service.lookup_revision_message.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_raw_ko_no_rev(self, mock_service): # given mock_service.lookup_revision_message.side_effect = NotFoundExc( 'No revision found') # when - rv = self.app.get('/api/1/revision/' - '18d8be353ed3480476f032475e7c233eff7371d5/raw/') + rv = self.client.get('/api/1/revision/' + '18d8be353ed3480476f032475e7c233eff7371d5/raw/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No revision found'}) mock_service.lookup_revision_message.assert_called_once_with( 
'18d8be353ed3480476f032475e7c233eff7371d5') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_with_origin_not_found(self, mock_service): mock_service.lookup_revision_by.return_value = None - rv = self.app.get('/api/1/revision/origin/123/') + rv = self.client.get('/api/1/revision/origin/123/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertIn('Revision with (origin_id: 123', response_data['reason']) - self.assertIn('not found', response_data['reason']) - self.assertEqual('NotFoundExc', response_data['exception']) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertIn('Revision with (origin_id: 123', rv.data['reason']) + self.assertIn('not found', rv.data['reason']) + self.assertEqual('NotFoundExc', rv.data['exception']) mock_service.lookup_revision_by.assert_called_once_with( - 123, + '123', 'refs/heads/master', None) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_with_origin(self, mock_service): mock_revision = { 'id': '32', 'directory': '21', 'message': 'message 1', 'type': 'deb', } expected_revision = { 'id': '32', 'url': '/api/1/revision/32/', 'history_url': '/api/1/revision/32/log/', 'directory': '21', 'directory_url': '/api/1/directory/21/', 'message': 'message 1', 'type': 'deb', } mock_service.lookup_revision_by.return_value = mock_revision - rv = self.app.get('/api/1/revision/origin/1/') + rv = self.client.get('/api/1/revision/origin/1/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEqual(response_data, expected_revision) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( - 1, + 
'1', 'refs/heads/master', None) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_with_origin_and_branch_name(self, mock_service): mock_revision = { 'id': '12', 'directory': '23', 'message': 'message 2', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '12', 'url': '/api/1/revision/12/', 'history_url': '/api/1/revision/12/log/', 'directory': '23', 'directory_url': '/api/1/directory/23/', 'message': 'message 2', 'type': 'tar', } - rv = self.app.get('/api/1/revision/origin/1/branch/refs/origin/dev/') + rv = self.client.get('/api/1/revision/origin/1' + '/branch/refs/origin/dev/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEqual(response_data, expected_revision) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( - 1, + '1', 'refs/origin/dev', None) - @patch('swh.web.ui.views.api.service') - @patch('swh.web.ui.views.api.utils') + @patch('swh.web.api.views.service') + @patch('swh.web.api.views.utils') @istest def api_revision_with_origin_and_branch_name_and_timestamp(self, mock_utils, mock_service): mock_revision = { 'id': '123', 'directory': '456', 'message': 'message 3', 'type': 'tar', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '123', 'url': '/api/1/revision/123/', 'history_url': '/api/1/revision/123/log/', 'directory': '456', 'directory_url': '/api/1/directory/456/', 'message': 'message 3', 'type': 'tar', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision - rv = self.app.get('/api/1/revision' - '/origin/1' - '/branch/refs/origin/dev' - '/ts/1452591542/') + rv = self.client.get('/api/1/revision' + '/origin/1' + 
'/branch/refs/origin/dev' + '/ts/1452591542/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEqual(response_data, expected_revision) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( - 1, + '1', 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with('1452591542') mock_utils.enrich_revision.assert_called_once_with( mock_revision) - @patch('swh.web.ui.views.api.service') - @patch('swh.web.ui.views.api.utils') + @patch('swh.web.api.views.service') + @patch('swh.web.api.views.utils') @istest def api_revision_with_origin_and_branch_name_and_timestamp_with_escapes( self, mock_utils, mock_service): mock_revision = { 'id': '999', } mock_service.lookup_revision_by.return_value = mock_revision expected_revision = { 'id': '999', 'url': '/api/1/revision/999/', 'history_url': '/api/1/revision/999/log/', } mock_utils.parse_timestamp.return_value = 'parsed-date' mock_utils.enrich_revision.return_value = expected_revision - rv = self.app.get('/api/1/revision' - '/origin/1' - '/branch/refs%2Forigin%2Fdev' - '/ts/Today%20is%20' - 'January%201,%202047%20at%208:21:00AM/') + rv = self.client.get('/api/1/revision' + '/origin/1' + '/branch/refs%2Forigin%2Fdev' + '/ts/Today%20is%20' + 'January%201,%202047%20at%208:21:00AM/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEqual(response_data, expected_revision) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, expected_revision) mock_service.lookup_revision_by.assert_called_once_with( - 1, + '1', 'refs/origin/dev', 'parsed-date') mock_utils.parse_timestamp.assert_called_once_with( 'Today is January 1, 2047 at 8:21:00AM') 
mock_utils.enrich_revision.assert_called_once_with( mock_revision) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def revision_directory_by_ko_raise(self, mock_service): # given mock_service.lookup_directory_through_revision.side_effect = NotFoundExc('not') # noqa # when with self.assertRaises(NotFoundExc): - api._revision_directory_by( + views._revision_directory_by( {'sha1_git': 'id'}, None, '/api/1/revision/sha1/directory/') # then mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'id'}, None, limit=100, with_data=False) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def revision_directory_by_type_dir(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) # when - actual_dir_content = api._revision_directory_by( + actual_dir_content = views._revision_directory_by( {'sha1_git': 'blah-id'}, 'some/path', '/api/1/revision/sha1/directory/') # then self.assertEquals(actual_dir_content, { 'type': 'dir', 'revision': 'rev-id', 'path': 'some/path', 'content': [] }) mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'blah-id'}, 'some/path', limit=100, with_data=False) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def revision_directory_by_type_file(self, mock_service): # given mock_service.lookup_directory_through_revision.return_value = ( 'rev-id', { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': {'blah': 'blah'} }) # when - actual_dir_content = api._revision_directory_by( + actual_dir_content = views._revision_directory_by( {'sha1_git': 'sha1'}, 'some/path', '/api/1/revision/origin/2/directory/', limit=1000, with_data=True) # then self.assertEquals(actual_dir_content, { 'type': 'file', 'revision': 'rev-id', 'path': 'some/path', 'content': 
{'blah': 'blah'} }) mock_service.lookup_directory_through_revision.assert_called_once_with( {'sha1_git': 'sha1'}, 'some/path', limit=1000, with_data=True) - @patch('swh.web.ui.views.api.utils') - @patch('swh.web.ui.views.api._revision_directory_by') + @patch('swh.web.api.views._revision_directory_by') + @patch('swh.web.api.views.utils') @istest def api_directory_through_revision_origin_ko_not_found(self, - mock_rev_dir, - mock_utils): + mock_utils, + mock_rev_dir): mock_rev_dir.side_effect = NotFoundExc('not found') mock_utils.parse_timestamp.return_value = '2012-10-20 00:00:00' - rv = self.app.get('/api/1/revision' - '/origin/10' - '/branch/refs/remote/origin/dev' - '/ts/2012-10-20' - '/directory/') + rv = self.client.get('/api/1/revision' + '/origin/10' + '/branch/refs/remote/origin/dev' + '/ts/2012-10-20' + '/directory/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEqual(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'not found'}) mock_rev_dir.assert_called_once_with( - {'origin_id': 10, + {'origin_id': '10', 'branch_name': 'refs/remote/origin/dev', 'ts': '2012-10-20 00:00:00'}, None, '/api/1/revision' '/origin/10' '/branch/refs/remote/origin/dev' '/ts/2012-10-20' '/directory/', with_data=False) - @patch('swh.web.ui.views.api._revision_directory_by') + @patch('swh.web.api.views._revision_directory_by') @istest def api_directory_through_revision_origin(self, mock_revision_dir): expected_res = [{ 'id': '123' }] mock_revision_dir.return_value = expected_res - rv = self.app.get('/api/1/revision/origin/3/directory/') + rv = self.client.get('/api/1/revision/origin/3/directory/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - 
self.assertEqual(response_data, expected_res) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEqual(rv.data, expected_res) mock_revision_dir.assert_called_once_with({ - 'origin_id': 3, + 'origin_id': '3', 'branch_name': 'refs/heads/master', 'ts': None}, None, '/api/1/revision/origin/3/directory/', with_data=False) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log.return_value = stub_revisions expected_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' 'f7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' '42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [{ 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a4345', 'url': '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345/', # noqa }], 'type': 'tar', 'synthetic': True, }] # when - rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' - 'b7e2a44e6/log/') + rv = self.client.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' + 
'b7e2a44e6/log/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') + self.assertEquals(rv['Content-Type'], 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) - self.assertIsNone(rv.headers.get('Link')) + self.assertEquals(rv.data, expected_revisions) + self.assertFalse(rv.has_header('Link')) mock_service.lookup_revision_log.assert_called_once_with( '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 11) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_with_next(self, mock_service): # given stub_revisions = [] for i in range(27): stub_revisions.append({'id': i}) mock_service.lookup_revision_log.return_value = stub_revisions[:26] expected_revisions = [x for x in stub_revisions if x['id'] < 25] for e in expected_revisions: e['url'] = '/api/1/revision/%s/' % e['id'] e['history_url'] = '/api/1/revision/%s/log/' % e['id'] # when - rv = self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' - 'b7e2a44e6/log/?per_page=25') + rv = self.client.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42' + 'b7e2a44e6/log/?per_page=25') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) - self.assertEquals(rv.headers['Link'], + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_revisions) + self.assertEquals(rv['Link'], '; rel="next"') mock_service.lookup_revision_log.assert_called_once_with( '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 26) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_not_found(self, mock_service): # given mock_service.lookup_revision_log.return_value = None # when - rv = 
self.app.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6a42b7' - 'e2a44e6/log/') + rv = self.client.get('/api/1/revision/8834ef7e7c357ce2af928115c6c6' + 'a42b7e2a44e6/log/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Revision with sha1_git' ' 8834ef7e7c357ce2af928115c6c6a42b7e2a44e6 not found.'}) - self.assertIsNone(rv.headers.get('Link')) + self.assertFalse(rv.has_header('Link')) mock_service.lookup_revision_log.assert_called_once_with( '8834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 11) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_context(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log.return_value = stub_revisions mock_service.lookup_revision_multiple.return_value = [{ 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory': '18d8be353ed3480476f032475e7c233eff7371d5', 'author_name': 'Name Surname', 'author_email': 'name@surname.com', 'committer_name': 'Name Surname', 'committer_email': 'name@surname.com', 'message': 'amazing revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'], 'type': 'tar', 'synthetic': True, }] expected_revisions = [ { 'url': '/api/1/revision/' 
'7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/', 'history_url': '/api/1/revision/' '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/log/', 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory_url': '/api/1/directory/' '18d8be353ed3480476f032475e7c233eff7371d5/', 'author_name': 'Name Surname', 'author_email': 'name@surname.com', 'committer_name': 'Name Surname', 'committer_email': 'name@surname.com', 'message': 'amazing revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [{ 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', 'url': '/api/1/revision/adc83b19e793491b1c6ea0fd8b46cd9f32e592fc/', # noqa }], 'type': 'tar', 'synthetic': True, }, { 'url': '/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/' '18d8be353ed3480476f032475e7c233eff7371d5/log/', 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/' '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [{ 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a4345', 'url': '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345/', # noqa }], 'type': 'tar', 'synthetic': True, }] # when - rv = self.app.get('/api/1/revision/18d8be353ed3480476f0' - '32475e7c233eff7371d5/prev/prev-rev/log/') + rv = self.client.get('/api/1/revision/18d8be353ed3480476f0' + '32475e7c233eff7371d5/prev/21145781e2' + '6ad1f978e/log/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(expected_revisions, response_data) - self.assertIsNone(rv.headers.get('Link')) + 
self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(expected_revisions, rv.data) + self.assertFalse(rv.has_header('Link')) mock_service.lookup_revision_log.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5', 11) mock_service.lookup_revision_multiple.assert_called_once_with( - ['prev-rev']) + ['21145781e26ad1f978e']) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_by(self, mock_service): # given stub_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': ['7834ef7e7c357ce2af928115c6c6a42b7e2a4345'], 'type': 'tar', 'synthetic': True, }] mock_service.lookup_revision_log_by.return_value = stub_revisions expected_revisions = [{ 'id': '18d8be353ed3480476f032475e7c233eff7371d5', 'url': '/api/1/revision/18d8be353ed3480476f032475e7c233eff7371d5/', 'history_url': '/api/1/revision/18d8be353ed3480476f032475e7c233ef' 'f7371d5/log/', 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6', 'directory_url': '/api/1/directory/7834ef7e7c357ce2af928115c6c6a' '42b7e2a44e6/', 'author_name': 'Software Heritage', 'author_email': 'robot@softwareheritage.org', 'committer_name': 'Software Heritage', 'committer_email': 'robot@softwareheritage.org', 'message': 'synthetic revision message', 'date_offset': 0, 'committer_date_offset': 0, 'parents': [{ 'id': '7834ef7e7c357ce2af928115c6c6a42b7e2a4345', 'url': '/api/1/revision/7834ef7e7c357ce2af928115c6c6a42b7e2a4345/' # noqa }], 'type': 'tar', 'synthetic': True, }] # when - rv = self.app.get('/api/1/revision/origin/1/log/') + rv = self.client.get('/api/1/revision/origin/1/log/') # then 
self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) - self.assertEquals(rv.headers.get('Link'), None) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_revisions) + self.assertFalse(rv.has_header('Link')) mock_service.lookup_revision_log_by.assert_called_once_with( - 1, 'refs/heads/master', None, 11) + '1', 'refs/heads/master', None, 11) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_by_with_next(self, mock_service): # given stub_revisions = [] for i in range(27): stub_revisions.append({'id': i}) mock_service.lookup_revision_log_by.return_value = stub_revisions[:26] expected_revisions = [x for x in stub_revisions if x['id'] < 25] for e in expected_revisions: e['url'] = '/api/1/revision/%s/' % e['id'] e['history_url'] = '/api/1/revision/%s/log/' % e['id'] # when - rv = self.app.get('/api/1/revision/origin/1/log/?per_page=25') + rv = self.client.get('/api/1/revision/origin/1/log/?per_page=25') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - self.assertIsNotNone(rv.headers['Link']) - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_revisions) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertIsNotNone(rv['Link']) + self.assertEquals(rv.data, expected_revisions) mock_service.lookup_revision_log_by.assert_called_once_with( - 1, 'refs/heads/master', None, 26) + '1', 'refs/heads/master', None, 26) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_log_by_norev(self, mock_service): # given mock_service.lookup_revision_log_by.side_effect = NotFoundExc( 'No revision') # when - rv = self.app.get('/api/1/revision/origin/1/log/') + rv = 
self.client.get('/api/1/revision/origin/1/log/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - self.assertIsNone(rv.headers.get('Link')) - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, {'exception': 'NotFoundExc', - 'reason': 'No revision'}) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertFalse(rv.has_header('Link')) + self.assertEquals(rv.data, {'exception': 'NotFoundExc', + 'reason': 'No revision'}) mock_service.lookup_revision_log_by.assert_called_once_with( - 1, 'refs/heads/master', None, 11) + '1', 'refs/heads/master', None, 11) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_revision_history(self, mock_service): # for readability purposes, we use: # - sha1 as 3 letters (url are way too long otherwise to respect pep8) # - only keys with modification steps (all other keys are kept as is) # given stub_revision = { 'id': '883', 'children': ['777', '999'], 'parents': [], 'directory': '272' } mock_service.lookup_revision.return_value = stub_revision # then - rv = self.app.get('/api/1/revision/883/prev/999/') + rv = self.client.get('/api/1/revision/883/prev/999/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'id': '883', 'url': '/api/1/revision/883/', 'history_url': '/api/1/revision/883/log/', 'history_context_url': '/api/1/revision/883/prev/999/log/', 'children': ['777', '999'], 'children_urls': ['/api/1/revision/777/', '/api/1/revision/999/'], 'parents': [], 'directory': '272', 'directory_url': '/api/1/directory/272/' }) mock_service.lookup_revision.assert_called_once_with('883') - @patch('swh.web.ui.views.api._revision_directory_by') + 
@patch('swh.web.api.views._revision_directory_by') @istest def api_revision_directory_ko_not_found(self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('Not found') # then - rv = self.app.get('/api/1/revision/999/directory/some/path/to/dir/') + rv = self.client.get('/api/1/revision/999/directory/some/path/to/dir/') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Not found'}) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path/to/dir', '/api/1/revision/999/directory/some/path/to/dir/', with_data=False) - @patch('swh.web.ui.views.api._revision_directory_by') + @patch('swh.web.api.views._revision_directory_by') @istest def api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir): stub_dir = { 'type': 'dir', 'revision': '999', 'content': [ { 'sha1_git': '789', 'type': 'file', 'target': '101', 'target_url': '/api/1/content/sha1_git:101/', 'name': 'somefile', 'file_url': '/api/1/revision/999/directory/some/path/' 'somefile/' }, { 'sha1_git': '123', 'type': 'dir', 'target': '456', 'target_url': '/api/1/directory/456/', 'name': 'to-subdir', 'dir_url': '/api/1/revision/999/directory/some/path/' 'to-subdir/', }] } # given mock_rev_dir.return_value = stub_dir # then - rv = self.app.get('/api/1/revision/999/directory/some/path/') + rv = self.client.get('/api/1/revision/999/directory/some/path/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_dir) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, stub_dir) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path', 
'/api/1/revision/999/directory/some/path/', with_data=False) - @patch('swh.web.ui.views.api._revision_directory_by') + @patch('swh.web.api.views._revision_directory_by') @istest def api_revision_directory_ok_returns_content(self, mock_rev_dir): stub_content = { 'type': 'file', 'revision': '999', 'content': { 'sha1_git': '789', 'sha1': '101', 'data_url': '/api/1/content/101/raw/', } } # given mock_rev_dir.return_value = stub_content # then url = '/api/1/revision/666/directory/some/other/path/' - rv = self.app.get(url) + rv = self.client.get(url) self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_content) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, stub_content) mock_rev_dir.assert_called_once_with( {'sha1_git': '666'}, 'some/other/path', url, with_data=False) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_person(self, mock_service): # given stub_person = { 'id': '198003', 'name': 'Software Heritage', 'email': 'robot@softwareheritage.org', } mock_service.lookup_person.return_value = stub_person # when - rv = self.app.get('/api/1/person/198003/') + rv = self.client.get('/api/1/person/198003/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, stub_person) - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, stub_person) - - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_person_not_found(self, mock_service): # given mock_service.lookup_person.return_value = None # when - rv = self.app.get('/api/1/person/666/') + rv = self.client.get('/api/1/person/666/') # then self.assertEquals(rv.status_code, 404) - 
self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Person with id 666 not found.'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_directory(self, mock_service): # given stub_directories = [ { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', }] expected_directories = [ { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', }, { 'sha1_git': '1d518d8be353ed3480476f032475e7c233eff737', 'type': 'dir', 'target': '8be353ed3480476f032475e7c233eff737123456', 'target_url': '/api/1/directory/8be353ed3480476f032475e7c233eff737123456/', }] mock_service.lookup_directory.return_value = stub_directories # when - rv = self.app.get('/api/1/directory/' - '18d8be353ed3480476f032475e7c233eff7371d5/') + rv = self.client.get('/api/1/directory/' + '18d8be353ed3480476f032475e7c233eff7371d5/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_directories) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_directories) mock_service.lookup_directory.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_directory_not_found(self, mock_service): # given 
mock_service.lookup_directory.return_value = [] # when - rv = self.app.get('/api/1/directory/' - '66618d8be353ed3480476f032475e7c233eff737/') + rv = self.client.get('/api/1/directory/' + '66618d8be353ed3480476f032475e7c233eff737/') # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.'}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_directory_with_path_found(self, mock_service): # given expected_dir = { 'sha1_git': '18d8be353ed3480476f032475e7c233eff7371d5', 'type': 'file', 'name': 'bla', 'target': '4568be353ed3480476f032475e7c233eff737123', 'target_url': '/api/1/content/' 'sha1_git:4568be353ed3480476f032475e7c233eff737123/', } mock_service.lookup_directory_with_path.return_value = expected_dir # when - rv = self.app.get('/api/1/directory/' - '18d8be353ed3480476f032475e7c233eff7371d5/bla/') + rv = self.client.get('/api/1/directory/' + '18d8be353ed3480476f032475e7c233eff7371d5/bla/') # then self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_dir) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_dir) mock_service.lookup_directory_with_path.assert_called_once_with( '18d8be353ed3480476f032475e7c233eff7371d5', 'bla') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_directory_with_path_not_found(self, mock_service): # given mock_service.lookup_directory_with_path.return_value = None path = 'some/path/to/dir/' # when - rv = 
self.app.get(('/api/1/directory/' - '66618d8be353ed3480476f032475e7c233eff737/%s') - % path) + rv = self.client.get(('/api/1/directory/' + '66618d8be353ed3480476f032475e7c233eff737/%s') + % path) path = path.strip('/') # Path stripped of lead/trail separators # then self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': (('Entry with path %s relative to ' 'directory with sha1_git ' '66618d8be353ed3480476f032475e7c233eff737 not found.') % path)}) - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_lookup_entity_by_uuid_not_found(self, mock_service): # when mock_service.lookup_entity_by_uuid.return_value = [] # when - rv = self.app.get('/api/1/entity/' - '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') + rv = self.client.get('/api/1/entity/' + '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') self.assertEquals(rv.status_code, 404) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'NotFoundExc', 'reason': "Entity with uuid '5f4d4c51-498a-4e28-88b3-b3e4e8396cba' not " + "found."}) mock_service.lookup_entity_by_uuid.assert_called_once_with( '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_lookup_entity_by_uuid_bad_request(self, mock_service): # when mock_service.lookup_entity_by_uuid.side_effect = BadInputExc( 'bad input: uuid malformed!') # when - rv = self.app.get('/api/1/entity/uuid malformed/') + rv = self.client.get('/api/1/entity/uuid malformed/') self.assertEquals(rv.status_code, 400) - 
self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, { + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, { 'exception': 'BadInputExc', 'reason': 'bad input: uuid malformed!'}) mock_service.lookup_entity_by_uuid.assert_called_once_with( 'uuid malformed') - @patch('swh.web.ui.views.api.service') + @patch('swh.web.api.views.service') @istest def api_lookup_entity_by_uuid(self, mock_service): # when stub_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2' } ] mock_service.lookup_entity_by_uuid.return_value = stub_entities expected_entities = [ { 'uuid': '34bd6b1b-463f-43e5-a697-785107f598e4', 'uuid_url': '/api/1/entity/34bd6b1b-463f-43e5-a697-' '785107f598e4/', 'parent': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'parent_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' }, { 'uuid': 'aee991a0-f8d7-4295-a201-d1ce2efc9fb2', 'uuid_url': '/api/1/entity/aee991a0-f8d7-4295-a201-' 'd1ce2efc9fb2/' } ] # when - rv = self.app.get('/api/1/entity' - '/34bd6b1b-463f-43e5-a697-785107f598e4/') + rv = self.client.get('/api/1/entity' + '/34bd6b1b-463f-43e5-a697-785107f598e4/') self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.mimetype, 'application/json') - - response_data = json.loads(rv.data.decode('utf-8')) - self.assertEquals(response_data, expected_entities) + self.assertEquals(rv['Content-Type'], 'application/json') + self.assertEquals(rv.data, expected_entities) mock_service.lookup_entity_by_uuid.assert_called_once_with( '34bd6b1b-463f-43e5-a697-785107f598e4') class ApiUtils(unittest.TestCase): @istest def api_lookup_not_found(self): # when - with self.assertRaises(exc.NotFoundExc) as e: - api._api_lookup( + with self.assertRaises(NotFoundExc) as e: + views._api_lookup( lambda x: None, 'something', notfound_msg='this is 
the error message raised as it is None') self.assertEqual(e.exception.args[0], 'this is the error message raised as it is None') @istest def api_lookup_with_result(self): # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( lambda x: x + '!', 'something', notfound_msg='this is the error which won\'t be used here') self.assertEqual(actual_result, 'something!') @istest def api_lookup_with_result_as_map(self): # when - actual_result = api._api_lookup( + actual_result = views._api_lookup( lambda x: map(lambda y: y+1, x), [1, 2, 3], notfound_msg='this is the error which won\'t be used here') self.assertEqual(actual_result, [2, 3, 4]) diff --git a/swh/web/api/urls.py b/swh/web/api/urls.py new file mode 100644 index 00000000..7d5f1504 --- /dev/null +++ b/swh/web/api/urls.py @@ -0,0 +1,8 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from swh.web.api import views + +urlpatterns = views.get_url_patterns() diff --git a/swh/web/ui/utils.py b/swh/web/api/utils.py similarity index 74% rename from swh/web/ui/utils.py rename to swh/web/api/utils.py index 9a99d2aa..f0f91cae 100644 --- a/swh/web/ui/utils.py +++ b/swh/web/api/utils.py @@ -1,410 +1,400 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information - -import flask import re +from django.urls import reverse from datetime import datetime, timezone from dateutil import parser from .exc import BadInputExc def filter_endpoints(url_map, prefix_url_rule, blacklist=[]): """Filter endpoints by prefix url rule. 
Args: - url_map: Url Werkzeug.Map of rules - prefix_url_rule: prefix url string - blacklist: blacklist of some url Returns: Dictionary of url_rule with values methods and endpoint. The key is the url, the associated value is a dictionary of 'methods' (possible http methods) and 'endpoint' (python function) """ out = {} for r in url_map: rule = r['rule'] if rule == prefix_url_rule or rule in blacklist: continue if rule.startswith(prefix_url_rule): out[rule] = {'methods': sorted(map(str, r['methods'])), 'endpoint': r['endpoint']} return out def fmap(f, data): """Map f to data at each level. This must keep the origin data structure type: - map -> map - dict -> dict - list -> list - None -> None Args: f: function that expects one argument. data: data to traverse to apply the f function. list, map, dict or bare value. Returns: The same data-structure with modified values by the f function. """ if data is None: return data if isinstance(data, map): return map(lambda y: fmap(f, y), (x for x in data)) if isinstance(data, list): return [fmap(f, x) for x in data] if isinstance(data, dict): return {k: fmap(f, v) for (k, v) in data.items()} return f(data) def prepare_data_for_view(data, encoding='utf-8'): def prepare_data(s): # Note: can only be 'data' key with bytes of raw content if isinstance(s, bytes): try: return s.decode(encoding) except: return "Cannot decode the data bytes, try and set another " \ "encoding in the url (e.g. ?encoding=utf8) or " \ "download directly the " \ "content's raw data." if isinstance(s, str): return re.sub(r'/api/1/', r'/browse/', s) return s return fmap(prepare_data, data) def filter_field_keys(data, field_keys): """Given an object instance (directory or list), and a csv field keys to filter on. Return the object instance with filtered keys. Note: Returns obj as is if it's an instance of types not in (dictionary, list) Args: - data: one object (dictionary, list...) to filter. 
- field_keys: csv or set of keys to filter the object on Returns: obj filtered on field_keys """ if isinstance(data, map): return map(lambda x: filter_field_keys(x, field_keys), data) if isinstance(data, list): return [filter_field_keys(x, field_keys) for x in data] if isinstance(data, dict): return {k: v for (k, v) in data.items() if k in field_keys} return data def person_to_string(person): """Map a person (person, committer, tagger, etc...) to a string. """ return ''.join([person['name'], ' <', person['email'], '>']) def parse_timestamp(timestamp): """Given a time or timestamp (as string), parse the result as datetime. Returns: a timezone-aware datetime representing the parsed value. None if the parsing fails. Samples: - 2016-01-12 - 2016-01-12T09:19:12+0100 - Today is January 1, 2047 at 8:21:00AM - 1452591542 """ if not timestamp: return None try: return parser.parse(timestamp, ignoretz=False, fuzzy=True) except: try: return datetime.utcfromtimestamp(float(timestamp)).replace( tzinfo=timezone.utc) except (ValueError, OverflowError) as e: raise BadInputExc(e) def enrich_object(object): """Enrich an object (revision, release) with link to the 'target' of type 'target_type'. Args: object: An object with target and target_type keys (e.g. 
release, revision) Returns: Object enriched with target_url pointing to the right swh.web.ui.api urls for the pointing object (revision, release, content, directory) """ obj = object.copy() if 'target' in obj and 'target_type' in obj: if obj['target_type'] == 'revision': - obj['target_url'] = flask.url_for('api_revision', - sha1_git=obj['target']) + obj['target_url'] = reverse('revision', + kwargs={'sha1_git': obj['target']}) elif obj['target_type'] == 'release': - obj['target_url'] = flask.url_for('api_release', - sha1_git=obj['target']) + obj['target_url'] = reverse('release', + kwargs={'sha1_git': obj['target']}) elif obj['target_type'] == 'content': - obj['target_url'] = flask.url_for( - 'api_content_metadata', - q='sha1_git:' + obj['target']) + obj['target_url'] = \ + reverse('content', kwargs={'q': 'sha1_git:' + obj['target']}) elif obj['target_type'] == 'directory': - obj['target_url'] = flask.url_for('api_directory', - q=obj['target']) + obj['target_url'] = reverse('directory', + kwargs={'sha1_git': obj['target']}) if 'author' in obj: author = obj['author'] - obj['author_url'] = flask.url_for('api_person', - person_id=author['id']) + obj['author_url'] = reverse('person', + kwargs={'person_id': author['id']}) return obj enrich_release = enrich_object def enrich_directory(directory, context_url=None): """Enrich directory with url to content or directory. 
""" if 'type' in directory: target_type = directory['type'] target = directory['target'] if target_type == 'file': - directory['target_url'] = flask.url_for('api_content_metadata', - q='sha1_git:%s' % target) + directory['target_url'] = \ + reverse('content', kwargs={'q': 'sha1_git:%s' % target}) if context_url: directory['file_url'] = context_url + directory['name'] + '/' else: - directory['target_url'] = flask.url_for('api_directory', - sha1_git=target) + directory['target_url'] = reverse('directory', + kwargs={'sha1_git': target}) if context_url: directory['dir_url'] = context_url + directory['name'] + '/' return directory def enrich_metadata_endpoint(content): """Enrich metadata endpoint with link to the upper metadata endpoint. """ c = content.copy() - c['content_url'] = flask.url_for('api_content_metadata', - q='sha1:%s' % c['id']) + c['content_url'] = reverse('content', args=['sha1:%s' % c['id']]) return c def enrich_content(content, top_url=False): """Enrich content with links to: - data_url: its raw data - filetype_url: its filetype information """ for h in ['sha1', 'sha1_git', 'sha256']: if h in content: q = '%s:%s' % (h, content[h]) if top_url: - content['content_url'] = flask.url_for('api_content_metadata', - q=q) - content['data_url'] = flask.url_for('api_content_raw', q=q) - content['filetype_url'] = flask.url_for('api_content_filetype', - q=q) - content['language_url'] = flask.url_for('api_content_language', - q=q) - content['license_url'] = flask.url_for('api_content_license', - q=q) + content['content_url'] = reverse('content', kwargs={'q': q}) + content['data_url'] = reverse('content-raw', kwargs={'q': q}) + content['filetype_url'] = reverse('content-filetype', + kwargs={'q': q}) + content['language_url'] = reverse('content-language', + kwargs={'q': q}) + content['license_url'] = reverse('content-license', + kwargs={'q': q}) break return content def enrich_entity(entity): """Enrich entity with """ if 'uuid' in entity: - entity['uuid_url'] = 
flask.url_for('api_entity_by_uuid', - uuid=entity['uuid']) + entity['uuid_url'] = reverse('entity', + kwargs={'uuid': entity['uuid']}) if 'parent' in entity and entity['parent']: - entity['parent_url'] = flask.url_for('api_entity_by_uuid', - uuid=entity['parent']) + entity['parent_url'] = reverse('entity', + kwargs={'uuid': entity['parent']}) return entity def _get_path_list(path_string): """Helper for enrich_revision: get a list of the sha1 id of the navigation breadcrumbs, ordered from the oldest to the most recent. Args: path_string: the path as a '/'-separated string Returns: The navigation context as a list of sha1 revision ids """ return path_string.split('/') def _get_revision_contexts(rev_id, context): """Helper for enrich_revision: retrieve for the revision id and potentially the navigation breadcrumbs the context to pass to parents and children of of the revision. Args: rev_id: the revision's sha1 id context: the current navigation context Returns: The context for parents, children and the url of the direct child as a tuple in that order. 
""" context_for_parents = None context_for_children = None url_direct_child = None if not context: return (rev_id, None, None) path_list = _get_path_list(context) context_for_parents = '%s/%s' % (context, rev_id) prev_for_children = path_list[:-1] if len(prev_for_children) > 0: context_for_children = '/'.join(prev_for_children) child_id = path_list[-1] # This commit is not the first commit in the path if context_for_children: - url_direct_child = flask.url_for( - 'api_revision', - sha1_git=child_id, - context=context_for_children) + url_direct_child = reverse('revision-context', + kwargs={'sha1_git': child_id, + 'context': context_for_children}) # This commit is the first commit in the path else: - url_direct_child = flask.url_for( - 'api_revision', - sha1_git=child_id) + url_direct_child = reverse('revision', kwargs={'sha1_git': child_id}) return (context_for_parents, context_for_children, url_direct_child) def _make_child_url(rev_children, context): """Helper for enrich_revision: retrieve the list of urls corresponding to the children of the current revision according to the navigation breadcrumbs. Args: rev_children: a list of revision id context: the '/'-separated navigation breadcrumbs Returns: the list of the children urls according to the context """ children = [] for child in rev_children: if context and child != _get_path_list(context)[-1]: - children.append(flask.url_for('api_revision', sha1_git=child)) + children.append(reverse('revision', + kwargs={'sha1_git': child})) elif not context: - children.append(flask.url_for('api_revision', sha1_git=child)) + children.append(reverse('revision', kwargs={'sha1_git': child})) return children def enrich_revision(revision, context=None): """Enrich revision with links where it makes sense (directory, parents). Keep track of the navigation breadcrumbs if they are specified. 
def enrich_revision(revision, context=None):
    """Enrich revision with links where it makes sense (directory,
    parents). Keep track of the navigation breadcrumbs if they are
    specified.

    Args:
        revision: the revision as a dict
        context: the navigation breadcrumbs as a /-separated string of
            revision sha1_git
    """
    ctx_parents, ctx_children, url_direct_child = _get_revision_contexts(
        revision['id'], context)

    revision['url'] = reverse('revision', kwargs={'sha1_git': revision['id']})
    revision['history_url'] = reverse('revision-log',
                                      kwargs={'sha1_git': revision['id']})
    if context:
        revision['history_context_url'] = reverse(
            'revision-log', kwargs={'sha1_git': revision['id'],
                                    'prev_sha1s': context})

    if 'author' in revision:
        author = revision['author']
        revision['author_url'] = reverse('person',
                                         kwargs={'person_id': author['id']})

    if 'committer' in revision:
        committer = revision['committer']
        revision['committer_url'] = \
            reverse('person', kwargs={'person_id': committer['id']})

    if 'directory' in revision:
        revision['directory_url'] = \
            reverse('directory', kwargs={'sha1_git': revision['directory']})

    if 'parents' in revision:
        parents = []
        for parent in revision['parents']:
            parents.append({
                'id': parent,
                'url': reverse('revision', kwargs={'sha1_git': parent})
            })
        revision['parents'] = parents

    if 'children' in revision:
        children = _make_child_url(revision['children'], context)
        if url_direct_child:
            children.append(url_direct_child)
        revision['children_urls'] = children
    else:
        if url_direct_child:
            revision['children_urls'] = [url_direct_child]

    if 'message_decoding_failed' in revision:
        revision['message_url'] = reverse('revision-raw-message',
                                          kwargs={'sha1_git': revision['id']})

    return revision


def shorten_path(path):
    """Shorten the given path: for each hash present, only return the
    first 8 characters followed by an ellipsis"""
    # only hexadecimal characters appear in sha1/sha256 digests; the
    # sha256 tail previously accepted [0-9a-z], which could truncate
    # non-hash path components
    sha256_re = r'([0-9a-f]{8})[0-9a-f]{56}'
    sha1_re = r'([0-9a-f]{8})[0-9a-f]{32}'

    ret = re.sub(sha256_re, r'\1...', path)
    return re.sub(sha1_re, r'\1...', ret)
criteria could be found' + +_doc_ret_revision_meta = 'metadata of the revision identified by sha1_git' +_doc_ret_revision_log = """list of dictionaries representing the metadata of + each revision found in the commit log heading to revision sha1_git. + For each commit at least the following information are returned: + author/committer, authoring/commit timestamps, revision id, commit message, + parent (i.e., immediately preceding) commits, "root" directory id.""" + +_doc_header_link = """indicates that a subsequent result page is available, + pointing to it""" + + +def get_url_patterns(): + return APIUrls.get_url_patterns() + + +def _api_lookup(lookup_fn, *args, + notfound_msg='Object not found', + enrich_fn=lambda x: x): + """Capture a redundant behavior of: + - looking up the backend with a criteria (be it an identifier or checksum) + passed to the function lookup_fn + - if nothing is found, raise an NotFoundExc exception with error + message notfound_msg. + - Otherwise if something is returned: + - either as list, map or generator, map the enrich_fn function to it + and return the resulting data structure as list. + - either as dict and pass to enrich_fn and return the dict enriched. + + Args: + - criteria: discriminating criteria to lookup + - lookup_fn: function expects one criteria and optional supplementary + *args. + - notfound_msg: if nothing matching the criteria is found, + raise NotFoundExc with this error message. + - enrich_fn: Function to use to enrich the result returned by + lookup_fn. Default to the identity function if not provided. + - *args: supplementary arguments to pass to lookup_fn. + + Raises: + NotFoundExp or whatever `lookup_fn` raises. 
+ + """ + res = lookup_fn(*args) + if not res: + raise NotFoundExc(notfound_msg) + if isinstance(res, (map, list, GeneratorType)): + return [enrich_fn(x) for x in res] + return enrich_fn(res) + + +@api_view() +def api_home(request): + return Response({}, template_name='api.html') + + +APIUrls.urlpatterns.append(url(r'^$', api_home, name='homepage')) + + +@api_route(r'/', 'endpoints') +def api_endpoints(request): + """Display the list of opened api endpoints. + + """ + routes = APIUrls.get_app_endpoints().copy() + for route, doc in routes.items(): + doc['doc_intro'] = doc['docstring'].split('\n\n')[0] + # Return a list of routes with consistent ordering + env = { + 'doc_routes': sorted(routes.items()) + } + return Response(env, template_name="api-endpoints.html") + + +@api_route(r'/origin/(?P[0-9]+)/', 'origin') +@api_route(r'/origin/(?P[a-z]+)/url/(?P.+)', + 'origin') +@api_doc.route('/origin/') +@api_doc.arg('origin_id', + default=1, + argtype=api_doc.argtypes.int, + argdoc='origin identifier (when looking up by ID)') +@api_doc.arg('origin_type', + default='git', + argtype=api_doc.argtypes.str, + argdoc='origin type (when looking up by type+URL)') +@api_doc.arg('origin_url', + default='https://github.com/hylang/hy', + argtype=api_doc.argtypes.path, + argdoc='origin URL (when looking up by type+URL)') +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""The metadata of the origin corresponding to the given + criteria""") +def api_origin(request, origin_id=None, origin_type=None, origin_url=None): + """Get information about a software origin. + + Software origins might be looked up by origin type and canonical URL (e.g., + "git" + a "git clone" URL), or by their unique (but otherwise meaningless) + identifier. 
@api_route(r'/origin/(?P<origin_id>[0-9]+)/', 'origin')
@api_route(r'/origin/(?P<origin_type>[a-z]+)/url/(?P<origin_url>.+)',
           'origin')
@api_doc.route('/origin/')
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc='origin identifier (when looking up by ID)')
@api_doc.arg('origin_type',
             default='git',
             argtype=api_doc.argtypes.str,
             argdoc='origin type (when looking up by type+URL)')
@api_doc.arg('origin_url',
             default='https://github.com/hylang/hy',
             argtype=api_doc.argtypes.path,
             argdoc='origin URL (when looking up by type+URL)')
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""The metadata of the origin corresponding to the
                 given criteria""")
def api_origin(request, origin_id=None, origin_type=None, origin_url=None):
    """Get information about a software origin.

    Software origins might be looked up by origin type and canonical URL
    (e.g., "git" + a "git clone" URL), or by their unique (but otherwise
    meaningless) identifier.

    """
    ori_dict = {
        'id': origin_id,
        'type': origin_type,
        'url': origin_url
    }
    # keep only the criteria that were actually provided (the previous
    # comprehension re-read ori_dict[k] instead of using v directly)
    ori_dict = {k: v for k, v in ori_dict.items() if v}
    if 'id' in ori_dict:
        error_msg = 'Origin with id %s not found.' % ori_dict['id']
    else:
        error_msg = 'Origin with type %s and URL %s not found' % (
            ori_dict['type'], ori_dict['url'])

    def _enrich_origin(origin):
        if 'id' in origin:
            o = origin.copy()
            o['origin_visits_url'] = \
                reverse('origin-visits', kwargs={'origin_id': origin['id']})
            return o

        return origin

    return _api_lookup(
        service.lookup_origin, ori_dict,
        notfound_msg=error_msg,
        enrich_fn=_enrich_origin)


@api_route(r'/stat/counters/', 'stat-counters')
@api_doc.route('/stat/counters/', noargs=True)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""dictionary mapping object types to the amount of
                 corresponding objects currently available in the
                 archive""")
def api_stats(request):
    """Get statistics about the content of the archive.

    """
    return service.stat_counters()


@api_route(r'/origin/(?P<origin_id>[0-9]+)/visits/', 'origin-visits')
@api_doc.route('/origin/visits/')
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc='software origin identifier')
@api_doc.header('Link', doc=_doc_header_link)
@api_doc.param('last_visit', default=None,
               argtype=api_doc.argtypes.int,
               doc=_doc_arg_last_elt)
@api_doc.param('per_page', default=10,
               argtype=api_doc.argtypes.int,
               doc=_doc_arg_per_page)
@api_doc.returns(rettype=api_doc.rettypes.list,
                 retdoc="""a list of dictionaries describing individual
                 visits. For each visit, its identifier, timestamp (as
                 UNIX time), outcome, and visit-specific URL for more
                 information are given.""")
def api_origin_visits(request, origin_id):
    """Get information about all visits of a given software origin.

    """
    result = {}
    per_page = int(request.query_params.get('per_page', '10'))
    last_visit = request.query_params.get('last_visit')
    if last_visit:
        last_visit = int(last_visit)

    def _lookup_origin_visits(
            origin_id, last_visit=last_visit, per_page=per_page):
        return service.lookup_origin_visits(
            origin_id, last_visit=last_visit, per_page=per_page)

    def _enrich_origin_visit(origin_visit):
        ov = origin_visit.copy()
        ov['origin_visit_url'] = reverse('origin-visit',
                                         kwargs={'origin_id': origin_id,
                                                 'visit_id': ov['visit']})
        return ov

    visits = _api_lookup(
        _lookup_origin_visits, origin_id,
        notfound_msg='No origin {} found'.format(origin_id),
        enrich_fn=_enrich_origin_visit)

    if visits:
        # a full page means a next page may exist: advertise it through
        # the 'Link' header
        nb_visits = len(visits)
        if nb_visits == per_page:
            new_last_visit = visits[-1]['visit']
            query_params = QueryDict('', mutable=True)
            query_params['last_visit'] = new_last_visit

            if request.query_params.get('per_page'):
                query_params['per_page'] = per_page

            result['headers'] = {
                'link-next': reverse('origin-visits',
                                     kwargs={'origin_id': origin_id}) +
                '?' + query_params.urlencode()
            }

    result.update({
        'results': visits
    })

    return result


@api_route(r'/origin/(?P<origin_id>[0-9]+)/visit/(?P<visit_id>[0-9]+)/',
           'origin-visit')
@api_doc.route('/origin/visit/')
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc='software origin identifier')
@api_doc.arg('visit_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc="""visit identifier, relative to the origin identified
             by origin_id""")
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""dictionary containing both metadata for the
                 entire visit (e.g., timestamp as UNIX time, visit
                 outcome, etc.) and what was at the software origin during
                 the visit (i.e., a mapping from branches to other archive
                 objects)""")
def api_origin_visit(request, origin_id, visit_id):
    """Get information about a specific visit of a software origin.

    """
    def _enrich_origin_visit(origin_visit):
        ov = origin_visit.copy()
        ov['origin_url'] = reverse('origin',
                                   kwargs={'origin_id': ov['origin']})
        if 'occurrences' in ov:
            ov['occurrences'] = {
                k: utils.enrich_object(v)
                for k, v in ov['occurrences'].items()
            }
        return ov

    return _api_lookup(
        service.lookup_origin_visit, origin_id, visit_id,
        notfound_msg=('No visit {} for origin {} found'
                      .format(visit_id, origin_id)),
        enrich_fn=_enrich_origin_visit)
@api_route(r'/content/symbol/search/', 'content-symbol', methods=['POST'])
@api_route(r'/content/symbol/(?P<q>.+)/', 'content-symbol')
@api_doc.route('/content/symbol/', tags=['upcoming'])
@api_doc.arg('q',
             default='hello',
             argtype=api_doc.argtypes.str,
             argdoc="""An expression string to lookup in swh's raw
             content""")
@api_doc.header('Link', doc=_doc_header_link)
@api_doc.param('last_sha1', default=None,
               argtype=api_doc.argtypes.str,
               doc=_doc_arg_last_elt)
@api_doc.param('per_page', default=10,
               argtype=api_doc.argtypes.int,
               doc=_doc_arg_per_page)
@api_doc.returns(rettype=api_doc.rettypes.list,
                 retdoc="""A list of dict whose content matches the
                 expression. Each dict has the following keys:
                 - id (bytes): identifier of the content
                 - name (text): symbol whose content match the expression
                 - kind (text): kind of the symbol that matched
                 - lang (text): Language for that entry
                 - line (int): Number line for the symbol
                 """)
def api_content_symbol(request, q=None):
    """Search content objects by `Ctags
    <http://ctags.sourceforge.net/>`_-style symbol (e.g., function name,
    data type, method, ...).

    """
    result = {}
    last_sha1 = request.query_params.get('last_sha1', None)
    per_page = int(request.query_params.get('per_page', '10'))

    def lookup_exp(exp, last_sha1=last_sha1, per_page=per_page):
        return service.lookup_expression(exp, last_sha1, per_page)

    symbols = _api_lookup(
        lookup_exp, q,
        notfound_msg="No indexed raw content match expression '{}'.".format(
            q),
        enrich_fn=functools.partial(utils.enrich_content, top_url=True))

    if symbols:
        nb_symbols = len(symbols)

        # full page: expose the url of the next page in the 'Link' header
        if nb_symbols == per_page:
            query_params = QueryDict('', mutable=True)
            new_last_sha1 = symbols[-1]['sha1']
            query_params['last_sha1'] = new_last_sha1
            if request.query_params.get('per_page'):
                query_params['per_page'] = per_page

            result['headers'] = {
                'link-next': reverse('content-symbol', kwargs={'q': q}) +
                '?' + query_params.urlencode()
            }

    result.update({
        'results': symbols
    })

    return result


@api_route(r'/content/known/search/', 'content-known', methods=['POST'])
@api_route(r'/content/known/(?P<q>(?!search).*)/', 'content-known')
@api_doc.route('/content/known/', tags=['hidden'])
@api_doc.arg('q',
             default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
             argtype=api_doc.argtypes.sha1,
             argdoc='content identifier as a sha1 checksum')
@api_doc.param('q', default=None,
               argtype=api_doc.argtypes.str,
               doc="""(POST request) An algo_hash:hash string, where
               algo_hash is one of sha1, sha1_git or sha256 and hash is
               the hash to search for in SWH""")
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""a dictionary with results (found/not found for
                 each given identifier) and statistics about how many
                 identifiers were found""")
def api_check_content_known(request, q=None):
    """Check whether some content (AKA "blob") is present in the archive.

    Lookup can be performed by various means:

    - a GET request with one or several hashes, separated by ','
    - a POST request with one or several hashes, passed as (multiple)
      values for parameter 'q'

    """
    response = {'search_res': None,
                'search_stats': None}
    search_stats = {'nbfiles': 0, 'pct': 0}
    search_res = None

    queries = []
    # GET: Many hash separated values request
    if q:
        hashes = q.split(',')
        for v in hashes:
            queries.append({'filename': None, 'sha1': v})

    # POST: Many hash requests in post form submission
    elif request.method == 'POST':
        data = request.data
        # Remove potential inputs with no associated value
        for k, v in data.items():
            if v is not None:
                if k == 'q' and len(v) > 0:
                    queries.append({'filename': None, 'sha1': v})
                elif v != '':
                    queries.append({'filename': k, 'sha1': v})

    if queries:
        lookup = service.lookup_multiple_hashes(queries)
        result = []
        nb_queries = len(queries)
        for el in lookup:
            res_d = {'sha1': el['sha1'],
                     'found': el['found']}
            if 'filename' in el and el['filename']:
                res_d['filename'] = el['filename']
            result.append(res_d)
        search_res = result
        nbfound = len([x for x in lookup if x['found']])
        search_stats['nbfiles'] = nb_queries
        search_stats['pct'] = (nbfound / nb_queries) * 100

    response['search_res'] = search_res
    response['search_stats'] = search_stats
    return response


@api_route(r'/person/(?P<person_id>[0-9]+)/', 'person')
@api_doc.route('/person/')
@api_doc.arg('person_id',
             default=42,
             argtype=api_doc.argtypes.int,
             argdoc='person identifier')
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc='The metadata of the person identified by person_id')
def api_person(request, person_id):
    """Get information about a person.

    """
    return _api_lookup(
        service.lookup_person, person_id,
        notfound_msg='Person with id {} not found.'.format(person_id))


@api_route(r'/release/(?P<sha1_git>[0-9a-f]+)/', 'release')
@api_doc.route('/release/')
@api_doc.arg('sha1_git',
             default='7045404f3d1c54e6473c71bbb716529fbad4be24',
             argtype=api_doc.argtypes.sha1_git,
             argdoc='release identifier')
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc='The metadata of the release identified by sha1_git')
def api_release(request, sha1_git):
    """Get information about a release.

    Releases are identified by SHA1 checksums, compatible with Git tag
    identifiers. See ``release_identifier`` in our `data model module
    <https://forge.softwareheritage.org/source/swh-model/>`_
    for details about how they are computed.

    """
    error_msg = 'Release with sha1_git %s not found.' % sha1_git
    return _api_lookup(
        service.lookup_release, sha1_git,
        notfound_msg=error_msg,
        enrich_fn=utils.enrich_release)
def _revision_directory_by(revision, path, request_path,
                           limit=100, with_data=False):
    """Compute the revision matching criterion's directory or content
    data.

    Args:
        revision: dictionary of criterions representing a revision to
            lookup
        path: directory's path to lookup
        request_path: request path which holds the original context
        limit: optional query parameter to limit the revisions log
            (default to 100). For now, note that this limit could impede
            the transitivity conclusion about sha1_git not being an
            ancestor of
        with_data: indicate to retrieve the content's raw data if path
            resolves to a content.

    """
    def enrich_directory_local(dir, context_url=request_path):
        return utils.enrich_directory(dir, context_url)

    rev_id, result = service.lookup_directory_through_revision(
        revision, path, limit=limit, with_data=with_data)

    content = result['content']
    if result['type'] == 'dir':  # dir_entries
        result['content'] = list(map(enrich_directory_local, content))
    else:  # content
        result['content'] = utils.enrich_content(content)

    return result


@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/log/',
           'revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/ts/(?P<ts>.+)/log/',
           'revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)'
           r'/ts/(?P<ts>.+)/log/',
           'revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/log/',
           'revision-origin-log')
@api_doc.route('/revision/origin/log/')
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc="The revision's SWH origin identifier")
@api_doc.arg('branch_name',
             default='refs/heads/master',
             argtype=api_doc.argtypes.path,
             argdoc="""(Optional) The revision's branch name within the
             origin specified. Defaults to 'refs/heads/master'.""")
@api_doc.arg('ts',
             default='2000-01-17T11:23:54+00:00',
             argtype=api_doc.argtypes.ts,
             argdoc="""(Optional) A time or timestamp string to parse""")
@api_doc.header('Link', doc=_doc_header_link)
@api_doc.param('per_page', default=10,
               argtype=api_doc.argtypes.int,
               doc=_doc_arg_per_page)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc=_doc_ret_revision_log)
def api_revision_log_by(request, origin_id,
                        branch_name='refs/heads/master',
                        ts=None):
    """Show the commit log for a revision, searching for it based on
    software origin, branch name, and/or visit timestamp.

    This endpoint behaves like ``/log``, but operates on the revision
    that has been found at a given software origin, close to a given
    point in time, pointed by a given branch.
    """
    result = {}
    per_page = int(request.query_params.get('per_page', '10'))

    if ts:
        ts = utils.parse_timestamp(ts)

    # ask for one extra revision: its presence tells us a next page exists
    def lookup_revision_log_by_with_limit(o_id, br, ts, limit=per_page+1):
        return service.lookup_revision_log_by(o_id, br, ts, limit)

    error_msg = 'No revision matching origin %s ' % origin_id
    error_msg += ', branch name %s' % branch_name
    error_msg += (' and time stamp %s.' % ts) if ts else '.'

    rev_get = _api_lookup(
        lookup_revision_log_by_with_limit, origin_id, branch_name, ts,
        notfound_msg=error_msg,
        enrich_fn=utils.enrich_revision)
    nb_revisions = len(rev_get)
    if nb_revisions == per_page+1:
        revisions = rev_get[:-1]
        last_sha1_git = rev_get[-1]['id']

        params = {k: v for k, v in {'origin_id': origin_id,
                                    'branch_name': branch_name,
                                    'ts': ts,
                                    }.items() if v is not None}

        query_params = QueryDict('', mutable=True)
        query_params['sha1_git'] = last_sha1_git

        if request.query_params.get('per_page'):
            query_params['per_page'] = per_page

        result['headers'] = {
            'link-next': reverse('revision-origin-log', kwargs=params) +
            (('?' + query_params.urlencode()) if len(query_params) > 0
             else '')
        }

    else:
        revisions = rev_get

    result.update({'results': revisions})

    return result


# NOTE(review): the 'revision-directory' url name is shared with the
# sha1_git-based endpoint below; Django's reverse() disambiguates by the
# kwargs supplied, but confirm this reuse is intentional.
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/directory/',
           'revision-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/directory/'
           r'(?P<path>.+)/',
           'revision-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/directory/',
           'revision-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)/directory/',
           'revision-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/directory/(?P<path>.+)/',
           'revision-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)'
           r'/directory/(?P<path>.+)/',
           'revision-directory')
@api_doc.route('/revision/origin/directory/', tags=['hidden'])
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc="The revision's origin's SWH identifier")
@api_doc.arg('branch_name',
             default='refs/heads/master',
             argtype=api_doc.argtypes.path,
             argdoc="""The optional branch for the given origin (default
             to master""")
@api_doc.arg('ts',
             default='2000-01-17T11:23:54+00:00',
             argtype=api_doc.argtypes.ts,
             argdoc="""Optional timestamp (default to the nearest time
             crawl of timestamp)""")
@api_doc.arg('path',
             default='Dockerfile',
             argtype=api_doc.argtypes.path,
             argdoc='The path to the directory or file to display')
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""The metadata of the revision corresponding to
                 the given criteria""")
def api_directory_through_revision_origin(request, origin_id,
                                          branch_name="refs/heads/master",
                                          ts=None,
                                          path=None,
                                          with_data=False):
    """Display directory or content information through a revision
    identified by origin/branch/timestamp.
    """
    if ts:
        ts = utils.parse_timestamp(ts)

    return _revision_directory_by({'origin_id': origin_id,
                                   'branch_name': branch_name,
                                   'ts': ts
                                   },
                                  path, request.path,
                                  with_data=with_data)


@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/',
           'revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/',
           'revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
           r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)/',
           'revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/ts/(?P<ts>.+)/',
           'revision-origin')
@api_doc.route('/revision/origin/')
@api_doc.arg('origin_id',
             default=1,
             argtype=api_doc.argtypes.int,
             argdoc='software origin identifier')
@api_doc.arg('branch_name',
             default='refs/heads/master',
             argtype=api_doc.argtypes.path,
             argdoc="""(optional) fully-qualified branch name, e.g.,
             "refs/heads/master". Defaults to the master branch.""")
@api_doc.arg('ts',
             default=None,
             argtype=api_doc.argtypes.ts,
             argdoc="""(optional) timestamp close to which the revision
             pointed by the given branch should be looked up. Defaults to
             now.""")
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc=_doc_ret_revision_meta)
def api_revision_with_origin(request, origin_id,
                             branch_name="refs/heads/master",
                             ts=None):
    """Get information about a revision, searching for it based on
    software origin, branch name, and/or visit timestamp.

    This endpoint behaves like ``/revision``, but operates on the
    revision that has been found at a given software origin, close to a
    given point in time, pointed by a given branch.

    """
    ts = utils.parse_timestamp(ts)
    return _api_lookup(
        service.lookup_revision_by, origin_id, branch_name, ts,
        notfound_msg=('Revision with (origin_id: {}, branch_name: {}'
                      ', ts: {}) not found.'.format(origin_id,
                                                    branch_name, ts)),
        enrich_fn=utils.enrich_revision)


@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)'
           r'/prev/(?P<context>[0-9a-f/]+)/',
           'revision-context')
@api_doc.route('/revision/prev/', tags=['hidden'])
@api_doc.arg('sha1_git',
             default='ec72c666fb345ea5f21359b7bc063710ce558e39',
             argtype=api_doc.argtypes.sha1_git,
             argdoc="The revision's sha1_git identifier")
@api_doc.arg('context',
             default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a',
             argtype=api_doc.argtypes.path,
             argdoc='The navigation breadcrumbs -- use at your own risk')
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc='The metadata of the revision identified by '
                        'sha1_git')
def api_revision_with_context(request, sha1_git, context):
    """Return information about revision with id sha1_git.
    """
    def _enrich_revision(revision, context=context):
        return utils.enrich_revision(revision, context)

    return _api_lookup(
        service.lookup_revision, sha1_git,
        notfound_msg='Revision with sha1_git %s not found.' % sha1_git,
        enrich_fn=_enrich_revision)


@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/', 'revision')
@api_doc.route('/revision/')
@api_doc.arg('sha1_git',
             default='aafb16d69fd30ff58afdd69036a26047f3aebdc6',
             argtype=api_doc.argtypes.sha1_git,
             argdoc="revision identifier")
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc=_doc_ret_revision_meta)
def api_revision(request, sha1_git):
    """Get information about a revision.

    Revisions are identified by SHA1 checksums, compatible with Git
    commit identifiers. See ``revision_identifier`` in our `data model
    module <https://forge.softwareheritage.org/source/swh-model/>`_
    for details about how they are computed.

    """
    return _api_lookup(
        service.lookup_revision, sha1_git,
        notfound_msg='Revision with sha1_git {} not found.'.format(
            sha1_git),
        enrich_fn=utils.enrich_revision)


@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/raw/',
           'revision-raw-message')
@api_doc.route('/revision/raw/', tags=['hidden'], handle_response=True)
@api_doc.arg('sha1_git',
             default='ec72c666fb345ea5f21359b7bc063710ce558e39',
             argtype=api_doc.argtypes.sha1_git,
             argdoc="The queried revision's sha1_git identifier")
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.octet_stream,
                 retdoc="""The message of the revision identified by
                 sha1_git as a downloadable octet stream""")
def api_revision_raw_message(request, sha1_git):
    """Return the raw data of the message of revision identified by
    sha1_git
    """
    raw = service.lookup_revision_message(sha1_git)
    response = HttpResponse(raw['message'],
                            content_type='application/octet-stream')
    # canonical header casing (HTTP headers are case-insensitive, but be
    # consistent with common practice)
    response['Content-Disposition'] = \
        'attachment;filename=rev_%s_raw' % sha1_git
    return response


@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/directory/',
           'revision-directory')
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/directory/'
           r'(?P<dir_path>.+)/',
           'revision-directory')
@api_doc.route('/revision/directory/')
@api_doc.arg('sha1_git',
             default='ec72c666fb345ea5f21359b7bc063710ce558e39',
             argtype=api_doc.argtypes.sha1_git,
             argdoc='revision identifier')
@api_doc.arg('dir_path',
             default='Documentation/BUG-HUNTING',
             argtype=api_doc.argtypes.path,
             argdoc="""path relative to the root directory of revision
             identifier by sha1_git""")
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc="""either a list of directory entries with their
                 metadata, or the metadata of a single directory entry""")
def api_revision_directory(request, sha1_git,
                           dir_path=None,
                           with_data=False):
    """Get information about directory (entry) objects associated to
    revisions.

    Each revision is associated to a single "root" directory. This
    endpoint behaves like ``/directory/``, but operates on the root
    directory associated to a given revision.

    """
    return _revision_directory_by({'sha1_git': sha1_git},
                                  dir_path, request.path,
                                  with_data=with_data)


@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/log/', 'revision-log')
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)'
           r'/prev/(?P<prev_sha1s>[0-9a-f/]+)/log/',
           'revision-log')
@api_doc.route('/revision/log/')
@api_doc.arg('sha1_git',
             default='37fc9e08d0c4b71807a4f1ecb06112e78d91c283',
             argtype=api_doc.argtypes.sha1_git,
             argdoc='revision identifier')
@api_doc.arg('prev_sha1s',
             default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a',
             argtype=api_doc.argtypes.path,
             argdoc="""(Optional) Navigation breadcrumbs (descendant
             revisions previously visited). If multiple values, use / as
             delimiter. """)
@api_doc.header('Link', doc=_doc_header_link)
@api_doc.param('per_page', default=10,
               argtype=api_doc.argtypes.int,
               doc=_doc_arg_per_page)
@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id)
@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found)
@api_doc.returns(rettype=api_doc.rettypes.dict,
                 retdoc=_doc_ret_revision_log)
def api_revision_log(request, sha1_git, prev_sha1s=None):
    """Get a list of all revisions heading to a given one, i.e., show
    the commit log.

    """
    result = {}
    per_page = int(request.query_params.get('per_page', '10'))

    # ask for one extra revision: its presence signals a next page
    def lookup_revision_log_with_limit(s, limit=per_page+1):
        return service.lookup_revision_log(s, limit)

    error_msg = 'Revision with sha1_git %s not found.' % sha1_git
    rev_get = _api_lookup(lookup_revision_log_with_limit, sha1_git,
                          notfound_msg=error_msg,
                          enrich_fn=utils.enrich_revision)

    nb_revisions = len(rev_get)
    if nb_revisions == per_page+1:
        rev_backward = rev_get[:-1]
        new_last_sha1 = rev_get[-1]['id']
        query_params = QueryDict('', mutable=True)

        if request.query_params.get('per_page'):
            query_params['per_page'] = per_page

        result['headers'] = {
            'link-next': reverse('revision-log',
                                 kwargs={'sha1_git': new_last_sha1}) +
            (('?' + query_params.urlencode()) if len(query_params) > 0
             else '')
        }

    else:
        rev_backward = rev_get

    if not prev_sha1s:  # no nav breadcrumbs, so we're done
        revisions = rev_backward

    else:
        # compute the forward (previously visited) part of the log
        rev_forward_ids = prev_sha1s.split('/')
        rev_forward = _api_lookup(
            service.lookup_revision_multiple, rev_forward_ids,
            notfound_msg=error_msg,
            enrich_fn=utils.enrich_revision)
        revisions = rev_forward + rev_backward

    result.update({
        'results': revisions
    })
    return result
See ``directory_identifier`` in our `data model module + `_ + for details about how they are computed. + + When given only a directory identifier, this endpoint returns information + about the directory itself, returning its content (usually a list of + directory entries). When given a directory identifier and a path, this + endpoint returns information about the directory entry pointed by the + relative path, starting path resolution from the given directory. + + """ + if path: + error_msg_path = ('Entry with path %s relative to directory ' + 'with sha1_git %s not found.') % (path, sha1_git) + return _api_lookup( + service.lookup_directory_with_path, sha1_git, path, + notfound_msg=error_msg_path, + enrich_fn=utils.enrich_directory) + else: + error_msg_nopath = 'Directory with sha1_git %s not found.' % sha1_git + return _api_lookup( + service.lookup_directory, sha1_git, + notfound_msg=error_msg_nopath, + enrich_fn=utils.enrich_directory) + + +@api_route(r'/content/(?P.+)/provenance/', 'content-provenance') +@api_doc.route('/content/provenance/', tags=['hidden']) +@api_doc.arg('q', + default='sha1_git:88b9b366facda0b5ff8d8640ee9279bed346f242', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""List of provenance information (dict) for the matched + content.""") +def api_content_provenance(request, q): + """Return content's provenance information if any. 
+ + """ + def _enrich_revision(provenance): + p = provenance.copy() + p['revision_url'] = \ + reverse('revision', kwargs={'sha1_git': provenance['revision']}) + p['content_url'] = \ + reverse('content', + kwargs={'q': 'sha1_git:%s' % provenance['content']}) + p['origin_url'] = \ + reverse('origin', kwargs={'origin_id': provenance['origin']}) + p['origin_visits_url'] = \ + reverse('origin-visits', + kwargs={'origin_id': provenance['origin']}) + p['origin_visit_url'] = \ + reverse('origin-visit', kwargs={'origin_id': provenance['origin'], + 'visit_id': provenance['visit']}) + return p + + return _api_lookup( + service.lookup_content_provenance, q, + notfound_msg='Content with {} not found.'.format(q), + enrich_fn=_enrich_revision) + + +@api_route(r'/content/(?P.+)/filetype/', 'content-filetype') +@api_doc.route('/content/filetype/', tags=['upcoming']) +@api_doc.arg('q', + default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""Filetype information (dict) for the matched + content.""") +def api_content_filetype(request, q): + """Get information about the detected MIME type of a content object. 
+ + """ + return _api_lookup( + service.lookup_content_filetype, q, + notfound_msg='No filetype information found for content {}.'.format(q), + enrich_fn=utils.enrich_metadata_endpoint) + + +@api_route(r'/content/(?P.+)/language/', 'content-language') +@api_doc.route('/content/language/', tags=['upcoming']) +@api_doc.arg('q', + default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""Language information (dict) for the matched + content.""") +def api_content_language(request, q): + """Get information about the detected (programming) language of a content + object. + + """ + return _api_lookup( + service.lookup_content_language, q, + notfound_msg='No language information found for content {}.'.format(q), + enrich_fn=utils.enrich_metadata_endpoint) + + +@api_route(r'/content/(?P.+)/license/', 'content-license') +@api_doc.route('/content/license/', tags=['upcoming']) +@api_doc.arg('q', + default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""License information (dict) for the matched + content.""") +def api_content_license(request, q): + """Get information about the detected license of a content object. 
+ + """ + return _api_lookup( + service.lookup_content_license, q, + notfound_msg='No license information found for content {}.'.format(q), + enrich_fn=utils.enrich_metadata_endpoint) + + +@api_route(r'/content/(?P.+)/ctags/', 'content-ctags') +@api_doc.route('/content/ctags/', tags=['upcoming']) +@api_doc.arg('q', + default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""Ctags symbol (dict) for the matched + content.""") +def api_content_ctags(request, q): + """Get information about all `Ctags `_-style + symbols defined in a content object. + + """ + return _api_lookup( + service.lookup_content_ctags, q, + notfound_msg='No ctags symbol found for content {}.'.format(q), + enrich_fn=utils.enrich_metadata_endpoint) + + +@api_route(r'/content/(?P.+)/raw/', 'content-raw') +@api_doc.route('/content/raw/', handle_response=True) +@api_doc.arg('q', + default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.param('filename', default=None, + argtype=api_doc.argtypes.str, + doc='User\'s desired filename. If provided, the downloaded' + ' content will get that filename.') +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.octet_stream, + retdoc='The raw content data as an octet stream') +def api_content_raw(request, q): + """Get the raw content of a content object (AKA "blob"), as a byte + sequence. + + """ + def generate(content): + yield content['data'] + + content_raw = service.lookup_content_raw(q) + if not content_raw: + raise NotFoundExc('Content %s is not found.' 
% q) + + content_filetype = service.lookup_content_filetype(q) + if not content_filetype: + raise NotFoundExc('Content %s is not available for download.' % q) + + mimetype = content_filetype['mimetype'] + if 'text/' not in mimetype: + raise ForbiddenExc('Only textual content is available for download. ' + 'Actual content mimetype is %s.' % mimetype) + + filename = request.query_params.get('filename') + if not filename: + filename = 'content_%s_raw' % q.replace(':', '_') + + response = HttpResponse(generate(content_raw), + content_type='application/octet-stream') + response['Content-disposition'] = 'attachment; filename=%s' % filename + return response + + +@api_route(r'/content/(?P.+)/', 'content') +@api_doc.route('/content/') +@api_doc.arg('q', + default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', + argtype=api_doc.argtypes.algo_and_hash, + argdoc=_doc_arg_content_id) +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc="""known metadata for content identified by q""") +def api_content_metadata(request, q): + """Get information about a content (AKA "blob") object. + + """ + return _api_lookup( + service.lookup_content, q, + notfound_msg='Content with {} not found.'.format(q), + enrich_fn=utils.enrich_content) + + +@api_route(r'/entity/(?P.+)/', 'entity') +@api_doc.route('/entity/', tags=['hidden']) +@api_doc.arg('uuid', + default='5f4d4c51-498a-4e28-88b3-b3e4e8396cba', + argtype=api_doc.argtypes.uuid, + argdoc="The entity's uuid identifier") +@api_doc.raises(exc=api_doc.excs.badinput, doc=_doc_exc_bad_id) +@api_doc.raises(exc=api_doc.excs.notfound, doc=_doc_exc_id_not_found) +@api_doc.returns(rettype=api_doc.rettypes.dict, + retdoc='The metadata of the entity identified by uuid') +def api_entity_by_uuid(request, uuid): + """Return content information if content is found. 
+ + """ + return _api_lookup( + service.lookup_entity_by_uuid, uuid, + notfound_msg="Entity with uuid '%s' not found." % uuid, + enrich_fn=utils.enrich_entity) diff --git a/swh/web/config.py b/swh/web/config.py new file mode 100644 index 00000000..58980cc0 --- /dev/null +++ b/swh/web/config.py @@ -0,0 +1,45 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from swh.core import config +from swh.storage import get_storage + +DEFAULT_CONFIG = { + 'storage': ('dict', { + 'cls': 'remote', + 'args': { + 'url': 'http://127.0.0.1:5002/', + }, + }), + 'log_dir': ('string', '/tmp/swh/log'), + 'debug': ('bool', True), + 'host': ('string', '127.0.0.1'), + 'port': ('int', 8000), + 'secret_key': ('string', 'development key'), + 'limiter_rate': ('string', '60/min') +} + +swhweb_config = None + + +def get_config(config_file=None): + """Read the configuration file `config_file`, update the app with + parameters (secret_key, conf) and return the parsed configuration as a + dict. If no configuration file is provided, return a default + configuration.""" + + global swhweb_config + if not swhweb_config or config_file: + swhweb_config = config.read(config_file, DEFAULT_CONFIG) + config.prepare_folders(swhweb_config, 'log_dir') + swhweb_config['storage'] = get_storage(**swhweb_config['storage']) + return swhweb_config + + +def storage(): + """Return the current application's SWH storage. 
+ + """ + return get_config()['storage'] diff --git a/swh/web/db.sqlite3 b/swh/web/db.sqlite3 new file mode 100644 index 00000000..b88b6184 Binary files /dev/null and b/swh/web/db.sqlite3 differ diff --git a/swh/web/manage.py b/swh/web/manage.py new file mode 100644 index 00000000..166d3ed4 --- /dev/null +++ b/swh/web/manage.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import os +import sys + +from swh.web import config + +# Default configuration file +DEFAULT_CONF_FILE = '~/.config/swh/webapp.yml' + +if __name__ == "__main__": + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swh.web.settings") + try: + from django.core.management.commands.runserver import ( + Command as runserver + ) + from django.core.management import execute_from_command_line + except ImportError: + # The above import may fail for some other reason. Ensure that the + # issue is really that Django is missing to avoid masking other + # exceptions on Python 2. + try: + import django # noqa + except ImportError: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+ ) + raise + swh_web_config = config.get_config(DEFAULT_CONF_FILE) + runserver.default_port = swh_web_config['port'] + runserver.default_addr = swh_web_config['host'] + execute_from_command_line(sys.argv) diff --git a/swh/web/settings.py b/swh/web/settings.py new file mode 100644 index 00000000..d993d7b3 --- /dev/null +++ b/swh/web/settings.py @@ -0,0 +1,182 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + + +""" +Django settings for swhweb project. + +Generated by 'django-admin startproject' using Django 1.11.3. + +For more information on this file, see +https://docs.djangoproject.com/en/1.11/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/1.11/ref/settings/ +""" + +import os + +from swh.web.config import get_config + +swh_web_config = get_config() + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = swh_web_config['secret_key'] + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = swh_web_config['debug'] + +ALLOWED_HOSTS = ['127.0.0.1', 'localhost', 'testserver'] + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'django_extensions', + 'rest_framework', + 'rest_framework_swagger', + 'swh.web.api' +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'swh.web.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'swh.web.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/1.11/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(PROJECT_DIR, 'db.sqlite3'), + } +} + +# Password validation +# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa + 
}, +] + + +# Internationalization +# https://docs.djangoproject.com/en/1.11/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/1.11/howto/static-files/ + +STATIC_URL = '/static/' +STATICFILES_DIRS = [ + os.path.join(PROJECT_DIR, "static") +] + +INTERNAL_IPS = ['127.0.0.1'] + +REST_FRAMEWORK = { + 'DEFAULT_RENDERER_CLASSES': ( + 'rest_framework.renderers.JSONRenderer', + 'rest_framework_yaml.renderers.YAMLRenderer', + 'rest_framework.renderers.TemplateHTMLRenderer' + ), + 'DEFAULT_THROTTLE_CLASSES': ( + 'rest_framework.throttling.AnonRateThrottle', + ), + 'DEFAULT_THROTTLE_RATES': { + 'anon': None if DEBUG else swh_web_config['limiter_rate'], + } +} + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'filters': { + 'require_debug_false': { + '()': 'django.utils.log.RequireDebugFalse', + }, + 'require_debug_true': { + '()': 'django.utils.log.RequireDebugTrue', + }, + }, + 'handlers': { + 'console': { + 'level': 'INFO', + 'filters': ['require_debug_true'], + 'class': 'logging.StreamHandler', + }, + 'file': { + 'level': 'DEBUG', + 'filters': ['require_debug_false'], + 'class': 'logging.FileHandler', + 'filename': os.path.join(swh_web_config['log_dir'], 'swh-web.log'), + }, + }, + 'loggers': { + 'django': { + 'handlers': ['console', 'file'], + 'level': 'DEBUG', + 'propagate': True, + }, + }, +} diff --git a/swh/web/ui/static/css/bootstrap-responsive.min.css b/swh/web/static/css/bootstrap-responsive.min.css similarity index 100% rename from swh/web/ui/static/css/bootstrap-responsive.min.css rename to swh/web/static/css/bootstrap-responsive.min.css diff --git a/swh/web/ui/static/css/pygment.css b/swh/web/static/css/pygment.css similarity index 100% rename from swh/web/ui/static/css/pygment.css rename to swh/web/static/css/pygment.css diff --git a/swh/web/ui/static/css/style.css b/swh/web/static/css/style.css 
similarity index 100% rename from swh/web/ui/static/css/style.css rename to swh/web/static/css/style.css diff --git a/swh/web/ui/static/img/arrow-up-small.png b/swh/web/static/img/arrow-up-small.png similarity index 100% rename from swh/web/ui/static/img/arrow-up-small.png rename to swh/web/static/img/arrow-up-small.png diff --git a/swh/web/ui/static/img/icons/swh-logo-32x32.png b/swh/web/static/img/icons/swh-logo-32x32.png similarity index 100% rename from swh/web/ui/static/img/icons/swh-logo-32x32.png rename to swh/web/static/img/icons/swh-logo-32x32.png diff --git a/swh/web/ui/static/img/icons/swh-logo-archive-180x180.png b/swh/web/static/img/icons/swh-logo-archive-180x180.png similarity index 100% rename from swh/web/ui/static/img/icons/swh-logo-archive-180x180.png rename to swh/web/static/img/icons/swh-logo-archive-180x180.png diff --git a/swh/web/ui/static/img/icons/swh-logo-archive-192x192.png b/swh/web/static/img/icons/swh-logo-archive-192x192.png similarity index 100% rename from swh/web/ui/static/img/icons/swh-logo-archive-192x192.png rename to swh/web/static/img/icons/swh-logo-archive-192x192.png diff --git a/swh/web/ui/static/img/icons/swh-logo-archive-270x270.png b/swh/web/static/img/icons/swh-logo-archive-270x270.png similarity index 100% rename from swh/web/ui/static/img/icons/swh-logo-archive-270x270.png rename to swh/web/static/img/icons/swh-logo-archive-270x270.png diff --git a/swh/web/ui/static/img/swh-logo-archive.png b/swh/web/static/img/swh-logo-archive.png similarity index 100% rename from swh/web/ui/static/img/swh-logo-archive.png rename to swh/web/static/img/swh-logo-archive.png diff --git a/swh/web/ui/static/img/swh-logo-archive.svg b/swh/web/static/img/swh-logo-archive.svg similarity index 100% rename from swh/web/ui/static/img/swh-logo-archive.svg rename to swh/web/static/img/swh-logo-archive.svg diff --git a/swh/web/ui/static/js/calendar.js b/swh/web/static/js/calendar.js similarity index 100% rename from 
swh/web/ui/static/js/calendar.js rename to swh/web/static/js/calendar.js diff --git a/swh/web/ui/static/js/search.js b/swh/web/static/js/search.js similarity index 100% rename from swh/web/ui/static/js/search.js rename to swh/web/static/js/search.js diff --git a/swh/web/ui/static/robots.txt b/swh/web/static/robots.txt similarity index 100% rename from swh/web/ui/static/robots.txt rename to swh/web/static/robots.txt diff --git a/swh/web/ui/main.py b/swh/web/ui/main.py deleted file mode 100644 index 0d01674a..00000000 --- a/swh/web/ui/main.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright (C) 2015-2016 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import ipaddress -import logging -import os - -from flask import Flask -from flask_limiter import Limiter -from flask_limiter.util import get_remote_address - -from swh.core import config - -from swh.web.ui.renderers import urlize_api_links, safe_docstring_display -from swh.web.ui.renderers import revision_id_from_url, highlight_source -from swh.web.ui.renderers import SWHMultiResponse, urlize_header_links -from swh.storage import get_storage - - -DEFAULT_CONFIG = { - 'storage': ('dict', { - 'cls': 'remote', - 'args': { - 'url': 'http://127.0.0.1:5002/', - }, - }), - 'log_dir': ('string', '/tmp/swh/log'), - 'debug': ('bool', None), - 'host': ('string', '127.0.0.1'), - 'port': ('int', 5004), - 'secret_key': ('string', 'development key'), - 'max_log_revs': ('int', 25), - 'limiter': ('dict', { - 'global_limits': ['60 per minute'], - 'headers_enabled': True, - 'strategy': 'moving-window', - 'storage_uri': 'memory://', - 'storage_options': {}, - 'in_memory_fallback': ['60 per minute'], - }), -} - -class SWHFlask(Flask): - """SWH's flask application. 
- - """ - response_class = SWHMultiResponse - - -app = SWHFlask(__name__) -app.add_template_filter(urlize_api_links) -app.add_template_filter(urlize_header_links) -app.add_template_filter(safe_docstring_display) -app.add_template_filter(revision_id_from_url) -app.add_template_filter(highlight_source) - -def read_config(config_file): - """Read the configuration file `config_file`, update the app with - parameters (secret_key, conf) and return the parsed configuration as a - dict""" - - conf = config.read(config_file, DEFAULT_CONFIG) - config.prepare_folders(conf, 'log_dir') - conf['storage'] = get_storage(**conf['storage']) - - return conf - -def load_controllers(): - """Load the controllers for the application. - - """ - from swh.web.ui import views, apidoc # flake8: noqa - - -def rules(): - """Returns rules from the application in dictionary form. - - Beware, must be called after swh.web.ui.main.load_controllers funcall. - - Returns: - Generator of application's rules. - - """ - for rule in app.url_map._rules: - yield {'rule': rule.rule, - 'methods': rule.methods, - 'endpoint': rule.endpoint} - - -def storage(): - """Return the current application's storage. 
- - """ - return app.config['conf']['storage'] - -def prepare_limiter(): - """Prepare Flask Limiter from configuration and App configuration""" - if hasattr(app, 'limiter'): - return - - shared_limits = app.config['conf']['limiter'].pop('shared_limits', {}) - for name, shared_limit in shared_limits.items(): - if not shared_limit.get('exempted_networks'): - shared_limit['exempt_when'] = lambda: False - continue - - networks = [ipaddress.ip_network(network) - for network in shared_limit['exempted_networks']] - - def exempt(exempted=networks): - remote_address = ipaddress.ip_address(get_remote_address()) - return any(remote_address in network for network in exempted) - - shared_limit['exempt_when'] = exempt - - limiter = Limiter( - app, - key_func=get_remote_address, - **app.config['conf']['limiter'] - ) - app.limiter = limiter - - for view_name in sorted(app.view_functions): - for limit_name, shared_limit in shared_limits.items(): - if view_name.startswith(shared_limit['prefix']): - view_func = app.view_functions[view_name] - app.view_functions[view_name] = limiter.shared_limit( - ','.join(shared_limit['limits']), - limit_name, - key_func=get_remote_address, - exempt_when=shared_limit['exempt_when'], - )(view_func) - - -def run_from_webserver(environ, start_response): - """Run the WSGI app from the webserver, loading the configuration. - - Note: This function is called on a per-request basis so beware the side - effects here! - """ - - if 'conf' not in app.config: - load_controllers() - - config_path = '/etc/softwareheritage/webapp/webapp.yml' - - conf = read_config(config_path) - - app.secret_key = conf['secret_key'] - app.config['conf'] = conf - - prepare_limiter() - - logging.basicConfig(filename=os.path.join(conf['log_dir'], 'web-ui.log'), - level=logging.INFO) - - return app(environ, start_response) - - -def run_debug_from(config_path, verbose=False): - """Run the api's server in dev mode. 
- - Note: This is called only once (contrast with the production mode - in run_from_webserver function) - - Args: - conf is a dictionary of keywords: - - 'db_url' the db url's access (through psycopg2 format) - - 'content_storage_dir' revisions/directories/contents storage on disk - - 'host' to override the default 127.0.0.1 to open or not the server - to the world - - 'port' to override the default of 5004 (from the underlying layer: - flask) - - 'debug' activate the verbose logs - - 'secret_key' the flask secret key - - Returns: - Never - - """ - load_controllers() - - conf = read_config(config_path) - - app.secret_key = conf['secret_key'] - app.config['conf'] = conf - - host = conf.get('host', '127.0.0.1') - port = conf.get('port') - debug = conf.get('debug') - - prepare_limiter() - - log_file = os.path.join(conf['log_dir'], 'web-ui.log') - logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO, - handlers=[logging.FileHandler(log_file), - logging.StreamHandler()]) - - app.run(host=host, port=port, debug=debug) diff --git a/swh/web/ui/renderers.py b/swh/web/ui/renderers.py deleted file mode 100644 index 406cd071..00000000 --- a/swh/web/ui/renderers.py +++ /dev/null @@ -1,289 +0,0 @@ -# Copyright (C) 2015-2017 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import re -import yaml -import json - -from docutils.core import publish_parts -from docutils.writers.html4css1 import Writer, HTMLTranslator -from inspect import cleandoc -from jinja2 import escape, Markup -from flask import request, Response, render_template -from flask import g -from pygments import highlight -from pygments.lexers import guess_lexer -from pygments.formatters import HtmlFormatter - -from swh.web.ui import utils - - -class SWHFilterEnricher(): - """Global filter on fields. 
- - """ - @classmethod - def filter_by_fields(cls, data): - """Extract a request parameter 'fields' if it exists to permit the - filtering on the data dict's keys. - - If such field is not provided, returns the data as is. - - """ - fields = request.args.get('fields') - if fields: - fields = set(fields.split(',')) - data = utils.filter_field_keys(data, fields) - - return data - - -class SWHComputeLinkHeader: - """Add link header to response. - - Mixin intended to be used for example in SWHMultiResponse - - """ - @classmethod - def compute_link_header(cls, rv, options): - """Add Link header in returned value results. - - Expects rv to be a dict with 'results' and 'headers' key: - 'results': the returned value expected to be shown - 'headers': dictionary with link-next and link-prev - - Args: - rv (dict): with keys: - - 'headers': potential headers with 'link-next' - and 'link-prev' keys - - 'results': containing the result to return - options (dict): the initial dict to update with result if any - - Returns: - Dict with optional keys 'link-next' and 'link-prev'. - - """ - link_headers = [] - - if 'headers' not in rv: - return {} - - rv_headers = rv['headers'] - - if 'link-next' in rv_headers: - link_headers.append('<%s>; rel="next"' % ( - rv_headers['link-next'])) - if 'link-prev' in rv_headers: - link_headers.append('<%s>; rel="previous"' % ( - rv_headers['link-prev'])) - - if link_headers: - link_header_str = ','.join(link_headers) - headers = options.get('headers', {}) - headers.update({ - 'Link': link_header_str - }) - return headers - - return {} - - -class SWHTransformProcessor: - """Transform an eventual returned value with multiple layer of - information with only what's necessary. - - If the returned value rv contains the 'results' key, this is the - associated value which is returned. - - Otherwise, return the initial dict without the potential 'headers' - key. 
- - """ - @classmethod - def transform(cls, rv): - if 'results' in rv: - return rv['results'] - - if 'headers' in rv: - rv.pop('headers') - - return rv - - -class SWHMultiResponse(Response, SWHFilterEnricher, - SWHComputeLinkHeader, SWHTransformProcessor): - """ - A Flask Response subclass. - Override force_type to transform dict/list responses into callable Flask - response objects whose mimetype matches the request's Accept header: HTML - template render, YAML dump or default to a JSON dump. - """ - - @classmethod - def make_response_from_mimetype(cls, rv, options={}): - options = options.copy() - if not (isinstance(rv, list) or isinstance(rv, dict)): - return rv - - def wants_html(best_match): - return best_match == 'text/html' and \ - request.accept_mimetypes[best_match] > \ - request.accept_mimetypes['application/json'] - - def wants_yaml(best_match): - return best_match == 'application/yaml' and \ - request.accept_mimetypes[best_match] > \ - request.accept_mimetypes['application/json'] - - acc_mime = ['application/json', 'application/yaml', 'text/html'] - best_match = request.accept_mimetypes.best_match(acc_mime) - - options['headers'] = cls.compute_link_header(rv, options) - - rv = cls.transform(rv) - rv = cls.filter_by_fields(rv) - - if wants_html(best_match): - data = json.dumps(rv, sort_keys=True, - indent=4, separators=(',', ': ')) - env = g.get('doc_env', {}) - env['response_data'] = data - - env['headers_data'] = None - if options and 'headers' in options: - env['headers_data'] = options['headers'] - - env['request'] = request - env['heading'] = utils.shorten_path(str(request.path)) - env['status_code'] = options.get('status', 200) - rv = Response(render_template('apidoc.html', **env), - content_type='text/html', - **options) - elif wants_yaml(best_match): - rv = Response( - yaml.dump(rv), - content_type='application/yaml', - **options) - else: - # jsonify is unhappy with lists in Flask 0.10.1, use json.dumps - rv = Response( - json.dumps(rv), - 
content_type='application/json', - **options) - return rv - - @classmethod - def force_type(cls, rv, environ=None): - if isinstance(rv, dict) or isinstance(rv, list): - rv = cls.make_response_from_mimetype(rv) - return super().force_type(rv, environ) - - -def error_response(error_code, error): - """Private function to create a custom error response. - - """ - error_opts = {'status': error_code} - error_data = { - 'exception': error.__class__.__name__, - 'reason': str(error), - } - - return SWHMultiResponse.make_response_from_mimetype(error_data, - options=error_opts) - - -def urlize_api_links(text): - """Utility function for decorating api links in browsable api. - - Args: - text: whose content matching links should be transformed into - contextual API or Browse html links. - - Returns - The text transformed if any link is found. - The text as is otherwise. - - """ - return re.sub(r'(/api/.*/|/browse/.*/)', - r'\1', - str(escape(text))) - - -def urlize_header_links(text): - """Utility function for decorating headers links in browsable api. - - Args - text: Text whose content contains Link header value - - Returns: - The text transformed with html link if any link is found. - The text as is otherwise. 
- - """ - return re.sub(r'<(/api/.*|/browse/.*)>', r'<\1>', - text) - - -class NoHeaderHTMLTranslator(HTMLTranslator): - """ - Docutils translator subclass to customize the generation of HTML - from reST-formatted docstrings - """ - def __init__(self, document): - super().__init__(document) - self.body_prefix = [] - self.body_suffix = [] - - def visit_bullet_list(self, node): - self.context.append((self.compact_simple, self.compact_p)) - self.compact_p = None - self.compact_simple = self.is_compactable(node) - self.body.append(self.starttag(node, 'ul', CLASS='docstring')) - - -DOCSTRING_WRITER = Writer() -DOCSTRING_WRITER.translator_class = NoHeaderHTMLTranslator - - -def safe_docstring_display(docstring): - """ - Utility function to htmlize reST-formatted documentation in browsable - api. - """ - docstring = cleandoc(docstring) - return publish_parts(docstring, writer=DOCSTRING_WRITER)['html_body'] - - -def revision_id_from_url(url): - """Utility function to obtain a revision's ID from its browsing URL.""" - return re.sub(r'/browse/revision/([0-9a-f]{40}|[0-9a-f]{64})/.*', - r'\1', url) - - -def highlight_source(source_code_as_text): - """Leverage pygments to guess and highlight source code. - - Args - source_code_as_text (str): source code in plain text - - Returns: - Highlighted text if possible or plain text otherwise - - """ - try: - maybe_lexer = guess_lexer(source_code_as_text) - if maybe_lexer: - r = highlight( - source_code_as_text, maybe_lexer, - HtmlFormatter(linenos=True, - lineanchors='l', - anchorlinenos=True)) - else: - r = '
%s
' % source_code_as_text - except: - r = '
%s
' % source_code_as_text - - return Markup(r) diff --git a/swh/web/ui/tests/test_app.py b/swh/web/ui/tests/test_app.py deleted file mode 100644 index f39531a2..00000000 --- a/swh/web/ui/tests/test_app.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (C) 2015-2016 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -# Functions defined here are NOT DESIGNED FOR PRODUCTION - -import unittest -from swh.storage.api.client import RemoteStorage as Storage -from swh.web.ui import main -from flask_testing import TestCase - - -# Because the Storage's __init__ function does side effect at startup... -class RemoteStorageAdapter(Storage): - def __init__(self, base_url): - self.base_url = base_url - - -def _init_mock_storage(base_url='https://somewhere.org:4321'): - """Instanciate a remote storage whose goal is to be mocked in a test - context. - - NOT FOR PRODUCTION - - Returns: - An instance of swh.storage.api.client.RemoteStorage destined to be - mocked (it does not do any rest call) - - """ - return RemoteStorageAdapter(base_url) # destined to be used as mock - - -def create_app(base_url='https://somewhere.org:4321'): - """Function to initiate a flask app with storage designed to be mocked. 
- - Returns: - Tuple: - - app test client (for testing api, client decorator from flask) - - application's full configuration - - the storage instance to stub and mock - - the main app without any decoration - - NOT FOR PRODUCTION - - """ - storage = _init_mock_storage(base_url) - - # inject the mock data - conf = { - 'storage': storage, - 'max_log_revs': 25, - 'limiter': { - 'headers_enabled': True, - 'strategy': 'moving-window', - 'storage_uri': 'memory://', - 'storage_options': {}, - 'in_memory_fallback': ['1 per hour'], - 'shared_limits': { - 'swh_api': { - 'prefix': 'api_', - 'limits': ['1 per hour'], - 'exempted_networks': ['127.0.0.0/8'], - }, - }, - }, - } - - main.app.config.update({'conf': conf}) - - if not main.app.config['TESTING']: # HACK: install controllers only once! - main.app.config['TESTING'] = True - main.load_controllers() - main.prepare_limiter() - - return main.app.test_client(), main.app.config, storage, main.app - - -class SWHApiTestCase(unittest.TestCase): - """Testing API class. - - """ - @classmethod - def setUpClass(cls): - cls.app, cls.app_config, cls.storage, _ = create_app() - cls.maxDiff = None - - -class SWHViewTestCase(TestCase): - """Testing view class. - - cf. http://pythonhosted.org/Flask-Testing/ - """ - # This inhibits template rendering - # render_templates = False - def create_app(self): - """Initialize a Flask-Testing application instance to test view - without template rendering - - """ - _, _, _, appToDecorate = create_app() - return appToDecorate - - -class SWHApidocTestCase(SWHViewTestCase, SWHApiTestCase): - """Testing APIDoc class. 
- - """ diff --git a/swh/web/ui/tests/test_renderers.py b/swh/web/ui/tests/test_renderers.py deleted file mode 100644 index 2a6b8543..00000000 --- a/swh/web/ui/tests/test_renderers.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright (C) 2015-2017 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import json -import unittest -import yaml - -from flask import Response -from nose.tools import istest -from unittest.mock import patch, MagicMock - -from swh.web.ui import renderers - - -class SWHComputeLinkHeaderTest(unittest.TestCase): - @istest - def compute_link_header(self): - rv = { - 'headers': {'link-next': 'foo', 'link-prev': 'bar'}, - 'results': [1, 2, 3] - } - options = {} - - # when - headers = renderers.SWHComputeLinkHeader.compute_link_header( - rv, options) - - self.assertEquals(headers, { - 'Link': '; rel="next",; rel="previous"', - }) - - @istest - def compute_link_header_nothing_changed(self): - rv = {} - options = {} - - # when - headers = renderers.SWHComputeLinkHeader.compute_link_header( - rv, options) - - self.assertEquals(headers, {}) - - @istest - def compute_link_header_nothing_changed_2(self): - rv = {'headers': {}} - options = {} - - # when - headers = renderers.SWHComputeLinkHeader.compute_link_header( - rv, options) - - self.assertEquals(headers, {}) - - -class SWHTransformProcessorTest(unittest.TestCase): - @istest - def transform_only_return_results_1(self): - rv = {'results': {'some-key': 'some-value'}} - - self.assertEquals(renderers.SWHTransformProcessor.transform(rv), - {'some-key': 'some-value'}) - - @istest - def transform_only_return_results_2(self): - rv = {'headers': {'something': 'do changes'}, - 'results': {'some-key': 'some-value'}} - - self.assertEquals(renderers.SWHTransformProcessor.transform(rv), - {'some-key': 'some-value'}) - - @istest - def 
transform_do_remove_headers(self): - rv = {'headers': {'something': 'do changes'}, - 'some-key': 'some-value'} - - self.assertEquals(renderers.SWHTransformProcessor.transform(rv), - {'some-key': 'some-value'}) - - @istest - def transform_do_nothing(self): - rv = {'some-key': 'some-value'} - - self.assertEquals(renderers.SWHTransformProcessor.transform(rv), - {'some-key': 'some-value'}) - - -class RendererTestCase(unittest.TestCase): - - @patch('swh.web.ui.renderers.g') - @patch('swh.web.ui.renderers.json') - @patch('swh.web.ui.renderers.request') - @patch('swh.web.ui.renderers.render_template') - @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') - @patch('swh.web.ui.utils.shorten_path') - @istest - def swh_multi_response_mimetype_html(self, mock_shorten_path, mock_filter, - mock_render, mock_request, mock_json, - mock_g): - # given - data = { - 'data': [12, 34], - 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc' - } - mock_g.get.return_value = {'my_key': 'my_display_value'} - # mock_enricher.return_value = (data, {}) - mock_filter.return_value = data - mock_shorten_path.return_value = 'my_short_path' - expected_env = { - 'my_key': 'my_display_value', - 'response_data': json.dumps(data), - 'request': mock_request, - 'headers_data': {}, - 'heading': 'my_short_path', - 'status_code': 200, - } - - def mock_mimetypes(key): - mimetypes = { - 'text/html': 10, - 'application/json': 0.1, - 'application/yaml': 0.1 - } - return mimetypes[key] - accept_mimetypes = MagicMock() - accept_mimetypes.__getitem__.side_effect = mock_mimetypes - accept_mimetypes.best_match = MagicMock(return_value='text/html') - mock_request.accept_mimetypes = accept_mimetypes - mock_json.dumps.return_value = json.dumps(data) - - # when - rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) - - # then - # mock_enricher.assert_called_once_with(data, {}) - mock_filter.assert_called_once_with(data) - mock_render.assert_called_with('apidoc.html', **expected_env) - 
self.assertEqual(rv.status_code, 200) - self.assertEqual(rv.mimetype, 'text/html') - - @patch('swh.web.ui.renderers.g') - @patch('swh.web.ui.renderers.yaml') - @patch('swh.web.ui.renderers.request') - @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') - @istest - def swh_multi_response_mimetype_yaml(self, mock_filter, - mock_request, mock_yaml, mock_g): - # given - data = {'data': [12, 34], - 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'} - - def mock_mimetypes(key): - mimetypes = { - 'application/yaml': 10, - 'application/json': 0.1, - 'text/html': 0.1 - } - return mimetypes[key] - accept_mimetypes = MagicMock() - accept_mimetypes.__getitem__.side_effect = mock_mimetypes - accept_mimetypes.best_match = MagicMock( - return_value='application/yaml') - mock_request.accept_mimetypes = accept_mimetypes - mock_yaml.dump.return_value = yaml.dump(data) - mock_filter.return_value = data - - # when - rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) - - # then - mock_filter.assert_called_once_with(data) - mock_yaml.dump.assert_called_once_with(data) - self.assertEqual(rv.status_code, 200) - self.assertEqual(rv.mimetype, 'application/yaml') - self.assertEqual(data, yaml.load(rv.data.decode('utf-8'))) - - @patch('swh.web.ui.renderers.g') - @patch('swh.web.ui.renderers.json') - @patch('swh.web.ui.renderers.request') - @patch('swh.web.ui.renderers.SWHMultiResponse.filter_by_fields') - @istest - def swh_multi_response_mimetype_json(self, mock_filter, - mock_request, mock_json, mock_g): - # given - data = {'data': [12, 34], - 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'} - - def mock_mimetypes(key): - mimetypes = { - 'application/json': 10, - 'text/html': 0.1, - 'application/yaml': 0.1 - } - return mimetypes[key] - accept_mimetypes = MagicMock() - accept_mimetypes.__getitem__.side_effect = mock_mimetypes - accept_mimetypes.best_match = MagicMock( - return_value='application/json') - mock_request.accept_mimetypes = accept_mimetypes - 
mock_json.dumps.return_value = json.dumps(data) - mock_filter.return_value = data - - # when - rv = renderers.SWHMultiResponse.make_response_from_mimetype(data) - - # then - mock_filter.assert_called_once_with(data) - mock_json.dumps.assert_called_once_with(data) - self.assertEqual(rv.status_code, 200) - self.assertEqual(rv.mimetype, 'application/json') - self.assertEqual(data, json.loads(rv.data.decode('utf-8'))) - - @patch('swh.web.ui.renderers.request') - @istest - def swh_multi_response_make_response_not_list_dict(self, mock_request): - # given - incoming = Response() - - # when - rv = renderers.SWHMultiResponse.make_response_from_mimetype(incoming) - - # then - self.assertEqual(rv, incoming) - - @patch('swh.web.ui.renderers.request') - @istest - def swh_filter_renderer_do_nothing(self, mock_request): - # given - mock_request.args = {} - - swh_filter_renderer = renderers.SWHFilterEnricher() - - input_data = {'a': 'some-data'} - - # when - actual_data = swh_filter_renderer.filter_by_fields(input_data) - - # then - self.assertEquals(actual_data, input_data) - - @patch('swh.web.ui.renderers.utils') - @patch('swh.web.ui.renderers.request') - @istest - def swh_filter_renderer_do_filter(self, mock_request, mock_utils): - # given - mock_request.args = {'fields': 'a,c'} - mock_utils.filter_field_keys.return_value = {'a': 'some-data'} - - swh_filter_user = renderers.SWHMultiResponse() - - input_data = {'a': 'some-data', - 'b': 'some-other-data'} - - # when - actual_data = swh_filter_user.filter_by_fields(input_data) - - # then - self.assertEquals(actual_data, {'a': 'some-data'}) - - mock_utils.filter_field_keys.assert_called_once_with(input_data, - {'a', 'c'}) - - @istest - def urlize_api_links_api(self): - # update api link with html links content with links - content = '{"url": "/api/1/abc/"}' - expected_content = ('{"url": ' - '"/api/1/abc/"}') - - self.assertEquals(renderers.urlize_api_links(content), - expected_content) - - @istest - def 
urlize_api_links_browse(self): - # update /browse link with html links content with links - content = '{"url": "/browse/def/"}' - expected_content = ('{"url": ' - '"' - '/browse/def/"}') - self.assertEquals(renderers.urlize_api_links(content), - expected_content) - - @istest - def urlize_header_links(self): - # update api link with html links content with links - content = """; rel="next" -; rel="prev" -""" - expected_content = """</api/1/abc/>; rel="next" -</api/1/def/>; rel="prev" -""" - - self.assertEquals(renderers.urlize_header_links(content), - expected_content) - - @istest - def revision_id_from_url(self): - url = ('/browse/revision/9ba4bcb645898d562498ea66a0df958ef0e7a68c/' - 'prev/9ba4bcb645898d562498ea66a0df958ef0e7aaaa/') - - expected_id = '9ba4bcb645898d562498ea66a0df958ef0e7a68c' - self.assertEqual(renderers.revision_id_from_url(url), expected_id) - - @istest - def safe_docstring_display(self): - # update api link with html links content with links - docstring = """This is my list header: - - - Here is item 1, with a continuation - line right here - - Here is item 2 - - Here is something that is not part of the list""" - - expected_docstring = """

This is my list header:

-
    -
  • Here is item 1, with a continuation -line right here
  • -
  • Here is item 2
  • -
-

Here is something that is not part of the list

-""" - - self.assertEquals(renderers.safe_docstring_display(docstring), - expected_docstring) diff --git a/swh/web/ui/tests/views/__init__.py b/swh/web/ui/tests/views/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/swh/web/ui/tests/views/test_browse.py b/swh/web/ui/tests/views/test_browse.py deleted file mode 100644 index 4dcc40e7..00000000 --- a/swh/web/ui/tests/views/test_browse.py +++ /dev/null @@ -1,2102 +0,0 @@ -# Copyright (C) 2015 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -from nose.tools import istest - -from unittest import TestCase -from unittest.mock import patch - -from flask import url_for - -from swh.web.ui.views import browse -from swh.web.ui.exc import BadInputExc, NotFoundExc -from .. import test_app - - -class FileMock(): - - def __init__(self, filename): - self.filename = filename - - -class StaticViews(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.apidoc.APIUrls') - @istest - def browse_api_endpoints(self, mock_api_urls): - # given - endpoints = { - '/a/doc/endpoint/': 'relevant documentation', - '/some/other/endpoint/': 'more docstrings'} - mock_api_urls.apidoc_routes = endpoints - - # when - rv = self.client.get('/api/1/') - - # then - self.assertEquals(rv.status_code, 200) - self.assertIsNotNone( - self.get_context_variable('doc_routes'), - sorted(endpoints.items()) - ) - self.assert_template_used('api-endpoints.html') - - @istest - def browse_api_doc(self): - # given - - # when - rv = self.client.get('/api/') - - # then - self.assertEquals(rv.status_code, 200) - self.assert_template_used('api.html') - - @istest - def browse_archive(self): - # when - rv = self.client.get('/browse/') - - # then - self.assertEquals(rv.status_code, 200) - self.assert_template_used('browse.html') - - -class 
SearchRedirectsView(test_app.SWHViewTestCase): - render_template = False - - @istest - def search_origin_simple(self): - # when - rv = self.client.get('/origin/search/?origin_id=1&meaningless_arg=42') - - # then - self.assertRedirects(rv, url_for('browse_origin', origin_id=1)) - - @istest - def search_origin_type_url(self): - # when - rv = self.client.get('/origin/search/?origin_type=git' - '&origin_url=http://cool/project/url' - '&meaningless_arg=42') - - # then - self.assertRedirects(rv, url_for('browse_origin', - origin_type='git', - origin_url='http://cool/project/url')) - - @istest - def search_directory_dir_sha1(self): - # when - rv = self.client.get('/directory/search/?sha1_git=some_sha1' - '&path=some/path/in/folder' - '&meaningless_arg=gandalf') - - # then - self.assertRedirects(rv, url_for('browse_directory', - sha1_git='some_sha1', - path='some/path/in/folder')) - - @istest - def search_directory_dir_sha1_nopath(self): - # when - rv = self.client.get('/directory/search/?sha1_git=some_sha1' - '&meaningless_arg=gandalf') - - # then - self.assertRedirects(rv, url_for('browse_directory', - sha1_git='some_sha1')) - - @istest - def search_directory_rev_sha1(self): - # when - rv = self.client.get('/directory/search/?sha1_git=some_sha1' - '&dir_path=some/path/in/folder' - '&meaningless_arg=gandalf') - - # then - self.assertRedirects(rv, url_for('browse_revision_directory', - sha1_git='some_sha1', - dir_path='some/path/in/folder')) - - @istest - def search_directory_rev_sha1_nopath(self): - # when - rv = self.client.get('/directory/search/?sha1_git=some_sha1' - '&dir_path=' - '&meaningless_arg=gandalf') - - # then - self.assertRedirects(rv, url_for('browse_revision_directory', - sha1_git='some_sha1')) - - @istest - def search_directory_dir_time_place(self): - # when - rv = self.client.get('/directory/search/?origin_id=42' - '&branch_name=refs/heads/tail' - '&meaningless_arg=gandalf' - '&path=some/path') - - # then - self.assertRedirects(rv, url_for( - 
'browse_revision_directory_through_origin', - origin_id=42, branch_name='refs/heads/tail', - path='some/path', ts=None)) - - @istest - def search_revision_sha1(self): - # when - rv = self.client.get('/revision/search/?sha1_git=some_sha1') - - # then - self.assertRedirects(rv, url_for('browse_revision', - sha1_git='some_sha1')) - - @istest - def search_revision_time_place(self): - # when - rv = self.client.get('/revision/search/?origin_id=42' - '&branch_name=big/branch/on/tree' - '&ts=meaningful_ts') - - # then - self.assertRedirects(rv, url_for('browse_revision_with_origin', - origin_id=42, - branch_name='big/branch/on/tree', - ts='meaningful_ts')) - - -class SearchSymbolView(test_app.SWHViewTestCase): - render_template = False - - @istest - def search_symbol(self): - # when - rv = self.client.get('/content/symbol/') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('result'), None) - self.assertEqual(self.get_context_variable('message'), '') - self.assertEqual(self.get_context_variable('linknext'), None) - self.assertEqual(self.get_context_variable('linkprev'), None) - self.assert_template_used('symbols.html') - - @patch('swh.web.ui.views.browse.api') - @istest - def search_symbol_with_result(self, mock_api): - # given - stub_results = [ - { - 'kind': 'function', - 'name': 'hy', - 'sha1': 'some-hash', - }, - ] - mock_api.api_content_symbol.return_value = { - 'results': stub_results, - } - - # when - rv = self.client.get('/content/symbol/?q=hy') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('result'), stub_results) - - self.assertEqual(self.get_context_variable('message'), '') - self.assertEqual(self.get_context_variable('linknext'), None) - self.assertEqual(self.get_context_variable('linkprev'), None) - self.assert_template_used('symbols.html') - - mock_api.api_content_symbol.assert_called_once_with('hy') - - @patch('swh.web.ui.views.browse.api') - @istest - def 
search_symbol_with_result_and_pages(self, mock_api): - # given - stub_results = [ - { - 'kind': 'function', - 'name': 'hy', - 'sha1': 'some-hash', - } - ] - mock_api.api_content_symbol.return_value = { - 'results': stub_results, - 'headers': { - 'link-next': 'some-link', - } - } - - # when - rv = self.client.get('/content/symbol/?q=hy&per_page=1') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('result'), stub_results) - - self.assertEqual(self.get_context_variable('message'), '') - self.assertIsNotNone(self.get_context_variable('linknext')) - self.assertEqual(self.get_context_variable('linkprev'), None) - self.assert_template_used('symbols.html') - - mock_api.api_content_symbol.assert_called_once_with('hy') - - @patch('swh.web.ui.views.browse.api') - @istest - def search_symbol_bad_input(self, mock_api): - # given - mock_api.api_content_symbol.side_effect = BadInputExc('error msg') - - # when - rv = self.client.get('/content/symbol/?q=hello|hy') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('message'), 'error msg') - self.assertEqual(self.get_context_variable('result'), None) - self.assertEqual(self.get_context_variable('linknext'), None) - self.assertEqual(self.get_context_variable('linkprev'), None) - self.assert_template_used('symbols.html') - - mock_api.api_content_symbol.assert_called_once_with('hello|hy') - - -class SearchView(test_app.SWHViewTestCase): - render_template = False - - @istest - def search_default(self): - # when - rv = self.client.get('/content/search/') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('message'), '') - self.assertEqual(self.get_context_variable('search_res'), None) - self.assert_template_used('search.html') - - @patch('swh.web.ui.views.browse.api') - @istest - def search_get_query_hash_not_found(self, mock_api): - # given - mock_api.api_check_content_known.return_value = { - 'search_res': [{ - 'filename': 
None, - 'sha1': 'sha1:456', - 'found': False}], - 'search_stats': {'nbfiles': 1, 'pct': 100}} - - # when - rv = self.client.get('/content/search/?q=sha1:456') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('message'), '') - self.assertEqual(self.get_context_variable('search_res'), [ - {'filename': None, - 'sha1': 'sha1:456', - 'found': False}]) - self.assert_template_used('search.html') - - mock_api.api_check_content_known.assert_called_once_with('sha1:456') - - @patch('swh.web.ui.views.browse.api') - @istest - def search_get_query_hash_bad_input(self, mock_api): - # given - mock_api.api_check_content_known.side_effect = BadInputExc('error msg') - - # when - rv = self.client.get('/content/search/?q=sha1_git:789') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('message'), 'error msg') - self.assertEqual(self.get_context_variable('search_res'), None) - self.assert_template_used('search.html') - - mock_api.api_check_content_known.assert_called_once_with( - 'sha1_git:789') - - @patch('swh.web.ui.views.browse.api') - @istest - def search_get_query_hash_found(self, mock_api): - # given - mock_api.api_check_content_known.return_value = { - 'search_res': [{ - 'filename': None, - 'sha1': 'sha1:123', - 'found': True}], - 'search_stats': {'nbfiles': 1, 'pct': 100}} - - # when - rv = self.client.get('/content/search/?q=sha1:123') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('message'), '') - self.assertEqual(len(self.get_context_variable('search_res')), 1) - resp = self.get_context_variable('search_res')[0] - self.assertTrue(resp is not None) - self.assertEqual(resp['sha1'], 'sha1:123') - self.assertEqual(resp['found'], True) - self.assert_template_used('search.html') - - mock_api.api_check_content_known.assert_called_once_with('sha1:123') - - @patch('swh.web.ui.views.browse.request') - @patch('swh.web.ui.views.browse.api') - @istest - def 
search_post_hashes_bad_input(self, mock_api, mock_request): - # given - mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], - 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} - mock_request.method = 'POST' - mock_api.api_check_content_known.side_effect = BadInputExc( - 'error bad input') - - # when (mock_request completes the post request) - rv = self.client.post('/content/search/') - - # then - self.assertEqual(rv.status_code, 200) - self.assertEqual(self.get_context_variable('search_stats'), - {'nbfiles': 0, 'pct': 0}) - self.assertEqual(self.get_context_variable('search_res'), None) - self.assertEqual(self.get_context_variable('message'), - 'error bad input') - self.assert_template_used('search.html') - - @patch('swh.web.ui.views.browse.request') - @patch('swh.web.ui.views.browse.api') - @istest - def search_post_hashes_none(self, mock_api, mock_request): - # given - mock_request.form = {'a': ['456caf10e9535160d90e874b45aa426de762f19f'], - 'b': ['745bab676c8f3cec8016e0c39ea61cf57e518865']} - mock_request.method = 'POST' - mock_api.api_check_content_known.return_value = { - 'search_stats': {'nbfiles': 2, 'pct': 0}, - 'search_res': [{'filename': 'a', - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': False}, - {'filename': 'b', - 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', - 'found': False}]} - - # when (mock_request completes the post request) - rv = self.client.post('/content/search/') - - # then - self.assertEqual(rv.status_code, 200) - self.assertIsNotNone(self.get_context_variable('search_res')) - self.assertTrue(self.get_context_variable('search_stats') is not None) - self.assertEqual(len(self.get_context_variable('search_res')), 2) - - stats = self.get_context_variable('search_stats') - self.assertEqual(stats['nbfiles'], 2) - self.assertEqual(stats['pct'], 0) - - a, b = self.get_context_variable('search_res') - self.assertEqual(a['found'], False) - self.assertEqual(b['found'], False) - 
self.assertEqual(self.get_context_variable('message'), '') - - self.assert_template_used('search.html') - - @patch('swh.web.ui.views.browse.request') - @patch('swh.web.ui.views.browse.api') - @istest - def search_post_hashes_some(self, mock_api, mock_request): - # given - mock_request.form = {'a': '456caf10e9535160d90e874b45aa426de762f19f', - 'b': '745bab676c8f3cec8016e0c39ea61cf57e518865'} - mock_request.method = 'POST' - mock_api.api_check_content_known.return_value = { - 'search_stats': {'nbfiles': 2, 'pct': 50}, - 'search_res': [{'filename': 'a', - 'sha1': '456caf10e9535160d90e874b45aa426de762f19f', - 'found': False}, - {'filename': 'b', - 'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865', - 'found': True}]} - - # when (mock_request completes the post request) - rv = self.client.post('/content/search/') - - # then - self.assertEqual(rv.status_code, 200) - self.assertIsNotNone(self.get_context_variable('search_res')) - self.assertEqual(len(self.get_context_variable('search_res')), 2) - self.assertTrue(self.get_context_variable('search_stats') is not None) - - stats = self.get_context_variable('search_stats') - self.assertEqual(stats['nbfiles'], 2) - self.assertEqual(stats['pct'], 50) - self.assertEqual(self.get_context_variable('message'), '') - - a, b = self.get_context_variable('search_res') - self.assertEqual(a['found'], False) - self.assertEqual(b['found'], True) - self.assert_template_used('search.html') - - -class ContentView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_content_ko_not_found(self, mock_api): - # given - mock_api.api_content_metadata.side_effect = NotFoundExc( - 'Not found!') - - # when - rv = self.client.get('/browse/content/sha1:sha1-hash/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content.html') - self.assertEqual(self.get_context_variable('message'), - 'Not found!') - self.assertIsNone(self.get_context_variable('content')) - - 
mock_api.api_content_metadata.assert_called_once_with( - 'sha1:sha1-hash') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_content_ko_bad_input(self, mock_api): - # given - mock_api.api_content_metadata.side_effect = BadInputExc( - 'Bad input!') - - # when - rv = self.client.get('/browse/content/sha1:sha1-hash/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content.html') - self.assertEqual(self.get_context_variable('message'), - 'Bad input!') - self.assertIsNone(self.get_context_variable('content')) - - mock_api.api_content_metadata.assert_called_once_with( - 'sha1:sha1-hash') - - @patch('swh.web.ui.views.browse.service') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_content(self, mock_api, mock_service): - # given - stub_content = { - 'sha1': 'sha1-hash' - } - mock_api.api_content_metadata.return_value = stub_content - mock_api.api_content_filetype.return_value = { - 'mimetype': 'text/plain', - } - mock_api.api_content_language.return_value = { - 'lang': 'Hy', - } - mock_api.api_content_license.return_value = { - 'licenses': ['MIT', 'BSD'], - } - mock_service.lookup_content_raw.return_value = { - 'data': b'blah' - } - mock_api.api_content_ctags.return_value = [ - { - 'line': 12, - }, - { - 'line': 14, - } - ] - - expected_content = { - 'sha1': 'sha1-hash', - 'data': 'blah', - 'encoding': None, - 'mimetype': 'text/plain', - 'language': 'Hy', - 'licenses': "MIT, BSD", - } - - # when - rv = self.client.get('/browse/content/sha1:sha1-hash/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content.html') - self.assertIsNone(self.get_context_variable('message')) - actual_content = self.get_context_variable('content') - actual_content.pop('ctags') - self.assertEqual(actual_content, expected_content) - - mock_service.lookup_content_raw.assert_called_once_with( - 'sha1:sha1-hash') - mock_api.api_content_language.assert_called_once_with('sha1:sha1-hash') - 
mock_api.api_content_filetype.assert_called_once_with('sha1:sha1-hash') - mock_api.api_content_license.assert_called_once_with('sha1:sha1-hash') - mock_api.api_content_metadata.assert_called_once_with('sha1:sha1-hash') - mock_api.api_content_ctags.assert_called_once_with('sha1:sha1-hash') - - @patch('swh.web.ui.views.browse.service') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_content_less_data(self, mock_api, mock_service): - # given - stub_content = { - 'sha1': 'ha1', - } - mock_api.api_content_metadata.return_value = stub_content - mock_api.api_content_filetype.return_value = None - mock_api.api_content_language.return_value = None - mock_api.api_content_license.return_value = None - mock_service.lookup_content_raw.return_value = None - mock_api.api_content_ctags.return_value = [] - - expected_content = { - 'sha1': 'ha1', - 'data': None, - 'encoding': None, - 'mimetype': None, - 'language': None, - 'licenses': None, - 'ctags': None, - } - - # when - rv = self.client.get('/browse/content/sha1:ha1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content.html') - self.assertIsNone(self.get_context_variable('message')) - actual_content = self.get_context_variable('content') - self.assertEqual(actual_content, expected_content) - - mock_service.lookup_content_raw.assert_called_once_with('sha1:ha1') - mock_api.api_content_language.assert_called_once_with('sha1:ha1') - mock_api.api_content_filetype.assert_called_once_with('sha1:ha1') - mock_api.api_content_license.assert_called_once_with('sha1:ha1') - mock_api.api_content_metadata.assert_called_once_with('sha1:ha1') - mock_api.api_content_ctags.assert_called_once_with('sha1:ha1') - - @patch('swh.web.ui.views.browse.redirect') - @patch('swh.web.ui.views.browse.url_for') - @istest - def browse_content_raw(self, mock_urlfor, mock_redirect): - # given - stub_content_raw = b'some-data' - mock_urlfor.return_value = '/api/content/sha1:sha1-hash/raw/' - 
mock_redirect.return_value = stub_content_raw - - # when - rv = self.client.get('/browse/content/sha1:sha1-hash/raw/') - - self.assertEqual(rv.status_code, 200) - self.assertEqual(rv.data, stub_content_raw) - - mock_urlfor.assert_called_once_with('api_content_raw', - q='sha1:sha1-hash') - mock_redirect.assert_called_once_with( - '/api/content/sha1:sha1-hash/raw/') - - -class DirectoryView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_ko_bad_input(self, mock_api): - # given - mock_api.api_directory.side_effect = BadInputExc( - 'Invalid hash') - - # when - rv = self.client.get('/browse/directory/sha2-invalid/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('directory.html') - self.assertEqual(self.get_context_variable('message'), - 'Invalid hash') - self.assertEqual(self.get_context_variable('files'), []) - mock_api.api_directory.assert_called_once_with( - 'sha2-invalid') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_empty_result(self, mock_api): - # given - mock_api.api_directory.return_value = [] - - # when - rv = self.client.get('/browse/directory/some-sha1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('directory.html') - self.assertEqual(self.get_context_variable('message'), - 'Listing for directory some-sha1:') - self.assertEqual(self.get_context_variable('files'), []) - mock_api.api_directory.assert_called_once_with( - 'some-sha1') - - @patch('swh.web.ui.views.browse.service') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_relative_file(self, mock_api, mock_service): - # given - stub_entry = { - 'sha256': '240', - 'type': 'file' - } - mock_service.lookup_directory_with_path.return_value = stub_entry - stub_file = { - 'sha1_git': '123', - 'sha1': '456', - 'status': 'visible', - 'data_url': '/api/1/content/890', - 'length': 42, - 'ctime': 'Thu, 01 Oct 2015 
12:13:53 GMT', - 'target': 'file.txt', - 'sha256': '148' - } - mock_api.api_content_metadata.return_value = stub_file - mock_service.lookup_content_raw.return_value = { - 'data': 'this is my file'} - - # when - rv = self.client.get('/browse/directory/sha1/path/to/file/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content.html') - self.assertIsNotNone(self.get_context_variable('content')) - content = self.get_context_variable('content') - # change caused by call to prepare_data_for_view - self.assertEqual(content['data_url'], '/browse/content/890') - self.assertEqual(content['data'], 'this is my file') - mock_api.api_content_metadata.assert_called_once_with('sha256:240') - mock_service.lookup_content_raw.assert_called_once_with('sha256:240') - - @patch('swh.web.ui.views.browse.service') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_relative_dir(self, mock_api, mock_service): - # given - mock_service.lookup_directory_with_path.return_value = { - 'sha256': '240', - 'target': 'abcd', - 'type': 'dir' - } - - stub_directory_ls = [ - {'type': 'dir', - 'target': '123', - 'name': 'some-dir-name'}, - {'type': 'file', - 'sha1': '654', - 'name': 'some-filename'}, - {'type': 'dir', - 'target': '987', - 'name': 'some-other-dirname'} - ] - mock_api.api_directory.return_value = stub_directory_ls - - # when - rv = self.client.get('/browse/directory/sha1/path/to/dir/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('directory.html') - self.assertIsNotNone(self.get_context_variable('files')) - self.assertEqual(len(self.get_context_variable('files')), - len(stub_directory_ls)) - mock_api.api_directory.assert_called_once_with('abcd') - - @patch('swh.web.ui.views.browse.service') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_relative_not_found(self, mock_api, mock_service): - # given - mock_service.lookup_directory_with_path.side_effect = NotFoundExc( - 'Directory 
entry not found.') - - # when - rv = self.client.get('/browse/directory/some-sha1/some/path/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('directory.html') - self.assertEqual(self.get_context_variable('message'), - 'Directory entry not found.') - - @patch('swh.web.ui.views.browse.api') - @patch('swh.web.ui.views.browse.utils') - @istest - def browse_directory(self, mock_utils, mock_api): - # given - stub_directory_ls = [ - {'type': 'dir', - 'target': '123', - 'name': 'some-dir-name'}, - {'type': 'file', - 'sha1': '654', - 'name': 'some-filename'}, - {'type': 'dir', - 'target': '987', - 'name': 'some-other-dirname'} - ] - mock_api.api_directory.return_value = stub_directory_ls - stub_directory_map = [ - {'link': '/path/to/url/dir/123', - 'name': 'some-dir-name'}, - {'link': '/path/to/url/file/654', - 'name': 'some-filename'}, - {'link': '/path/to/url/dir/987', - 'name': 'some-other-dirname'} - ] - mock_utils.prepare_data_for_view.return_value = stub_directory_map - - # when - rv = self.client.get('/browse/directory/some-sha1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('directory.html') - self.assertEqual(self.get_context_variable('message'), - 'Listing for directory some-sha1:') - self.assertEqual(self.get_context_variable('files'), - stub_directory_map) - - mock_api.api_directory.assert_called_once_with( - 'some-sha1') - mock_utils.prepare_data_for_view.assert_called_once_with( - stub_directory_ls) - - -class ContentWithOriginView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') -# @istest - def browse_content_with_origin_content_ko_not_found(self, mock_api): - # given - mock_api.api_content_checksum_to_origin.side_effect = NotFoundExc( - 'Not found!') - - # when - rv = self.client.get('/browse/content/sha256:some-sha256/origin/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content-with-origin.html') - 
self.assertEqual(self.get_context_variable('message'), - 'Not found!') - - mock_api.api_content_checksum_to_origin.assert_called_once_with( - 'sha256:some-sha256') - - @patch('swh.web.ui.views.browse.api') -# @istest - def browse_content_with_origin_ko_bad_input(self, mock_api): - # given - mock_api.api_content_checksum_to_origin.side_effect = BadInputExc( - 'Invalid hash') - - # when - rv = self.client.get('/browse/content/sha256:some-sha256/origin/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content-with-origin.html') - self.assertEqual( - self.get_context_variable('message'), 'Invalid hash') - - mock_api.api_content_checksum_to_origin.assert_called_once_with( - 'sha256:some-sha256') - - @patch('swh.web.ui.views.browse.api') -# @istest - def browse_content_with_origin(self, mock_api): - # given - mock_api.api_content_checksum_to_origin.return_value = { - 'origin_type': 'ftp', - 'origin_url': '/some/url', - 'revision': 'revision-hash', - 'branch': 'master', - 'path': '/path/to', - } - - # when - rv = self.client.get('/browse/content/sha256:some-sha256/origin/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('content-with-origin.html') - self.assertEqual( - self.get_context_variable('message'), - "The content with hash sha256:some-sha256 has been seen on " + - "origin with type 'ftp'\n" + - "at url '/some/url'. 
The revision was identified at " + - "'revision-hash' on branch 'master'.\n" + - "The file's path referenced was '/path/to'.") - - mock_api.api_content_checksum_to_origin.assert_called_once_with( - 'sha256:some-sha256') - - -class OriginView(test_app.SWHViewTestCase): - render_template = False - - def setUp(self): - - def url_for_test(fn, **args): - if fn == 'browse_revision_with_origin': - return '/browse/revision/origin/%s/' % args['origin_id'] - elif fn == 'browse_origin_visits': - return '/browse/visits/%s/' % args['origin_id'] - - self.url_for_test = url_for_test - - self.stub_origin = {'type': 'git', - 'lister': None, - 'project': None, - 'url': 'rsync://some/url', - 'id': 426} - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_origin_ko_not_found(self, mock_api): - # given - mock_api.api_origin.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/origin/1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('origin.html') - self.assertIsNone(self.get_context_variable('origin')) - self.assertEqual( - self.get_context_variable('message'), - 'Not found!') - - mock_api.api_origin.assert_called_once_with(1, None, None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_origin_ko_bad_input(self, mock_api): - # given - mock_api.api_origin.side_effect = BadInputExc('wrong input') - - # when - rv = self.client.get('/browse/origin/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('origin.html') - self.assertIsNone(self.get_context_variable('origin')) - - mock_api.api_origin.assert_called_once_with(426, None, None) - - @patch('swh.web.ui.views.browse.api') - @patch('swh.web.ui.views.browse.url_for') - @istest - def browse_origin_found_id(self, mock_url_for, mock_api): - # given - - mock_url_for.side_effect = self.url_for_test - - mock_api.api_origin.return_value = self.stub_origin - - # when - rv = self.client.get('/browse/origin/426/') - - # then 
- self.assertEqual(rv.status_code, 200) - self.assert_template_used('origin.html') - self.assertEqual(self.get_context_variable('origin'), self.stub_origin) - self.assertEqual(self.get_context_variable('browse_url'), - '/browse/revision/origin/426/') - self.assertEqual(self.get_context_variable('visit_url'), - '/browse/visits/426/') - - mock_api.api_origin.assert_called_once_with(426, None, None) - - @patch('swh.web.ui.views.browse.api') - @patch('swh.web.ui.views.browse.url_for') - @istest - def browse_origin_found_url_type(self, mock_url_for, mock_api): - # given - - mock_url_for.side_effect = self.url_for_test - - mock_api.api_origin.return_value = self.stub_origin - - # when - rv = self.client.get('/browse/origin/git/url/rsync://some/url/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('origin.html') - self.assertEqual(self.get_context_variable('origin'), self.stub_origin) - self.assertEqual(self.get_context_variable('browse_url'), - '/browse/revision/origin/426/') - self.assertEqual(self.get_context_variable('visit_url'), - '/browse/visits/426/') - - mock_api.api_origin.assert_called_once_with(None, 'git', - 'rsync://some/url') - - -class PersonView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_person_ko_not_found(self, mock_api): - # given - mock_api.api_person.side_effect = NotFoundExc('not found') - - # when - rv = self.client.get('/browse/person/1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('person.html') - self.assertEqual(self.get_context_variable('person_id'), 1) - self.assertEqual( - self.get_context_variable('message'), - 'not found') - - mock_api.api_person.assert_called_once_with(1) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_person_ko_bad_input(self, mock_api): - # given - mock_api.api_person.side_effect = BadInputExc('wrong input') - - # when - rv = 
self.client.get('/browse/person/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('person.html') - self.assertEqual(self.get_context_variable('person_id'), 426) - - mock_api.api_person.assert_called_once_with(426) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_person(self, mock_api): - # given - mock_person = {'type': 'git', - 'lister': None, - 'project': None, - 'url': 'rsync://some/url', - 'id': 426} - mock_api.api_person.return_value = mock_person - - # when - rv = self.client.get('/browse/person/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('person.html') - self.assertEqual(self.get_context_variable('person_id'), 426) - self.assertEqual(self.get_context_variable('person'), mock_person) - - mock_api.api_person.assert_called_once_with(426) - - -class ReleaseView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_release_ko_not_found(self, mock_api): - # given - mock_api.api_release.side_effect = NotFoundExc('not found!') - - # when - rv = self.client.get('/browse/release/1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('release.html') - self.assertEqual(self.get_context_variable('sha1_git'), '1') - self.assertEqual( - self.get_context_variable('message'), - 'not found!') - - mock_api.api_release.assert_called_once_with('1') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_release_ko_bad_input(self, mock_api): - # given - mock_api.api_release.side_effect = BadInputExc('wrong input') - - # when - rv = self.client.get('/browse/release/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('release.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - - mock_api.api_release.assert_called_once_with('426') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_release(self, mock_api): - # given - 
self.maxDiff = None - mock_release = { - "date": "Sun, 05 Jul 2015 18:02:06 GMT", - "id": "1e951912027ea6873da6985b91e50c47f645ae1a", - "target": "d770e558e21961ad6cfdf0ff7df0eb5d7d4f0754", - "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' - 'eb5d7d4f0754/', - "synthetic": False, - "target_type": "revision", - "author": { - "email": "torvalds@linux-foundation.org", - "name": "Linus Torvalds" - }, - "message": "Linux 4.2-rc1\n", - "name": "v4.2-rc1" - } - mock_api.api_release.return_value = mock_release - - expected_release = { - "date": "Sun, 05 Jul 2015 18:02:06 GMT", - "id": "1e951912027ea6873da6985b91e50c47f645ae1a", - "target_url": '/browse/revision/d770e558e21961ad6cfdf0ff7df0' - 'eb5d7d4f0754/', - "target": 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - "synthetic": False, - "target_type": "revision", - "author": { - "email": "torvalds@linux-foundation.org", - "name": "Linus Torvalds" - }, - "message": "Linux 4.2-rc1\n", - "name": "v4.2-rc1" - } - - # when - rv = self.client.get('/browse/release/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('release.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - self.assertEqual(self.get_context_variable('release'), - expected_release) - - mock_api.api_release.assert_called_once_with('426') - - -class RevisionView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_ko_not_found(self, mock_api): - # given - mock_api.api_revision.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/revision/1/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git'), '1') - self.assertEqual( - self.get_context_variable('message'), - 'Not found!') - self.assertIsNone(self.get_context_variable('revision')) - - mock_api.api_revision.assert_called_once_with('1', None) - 
- @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_ko_bad_input(self, mock_api): - # given - mock_api.api_revision.side_effect = BadInputExc('wrong input!') - - # when - rv = self.client.get('/browse/revision/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - self.assertEqual( - self.get_context_variable('message'), - 'wrong input!') - self.assertIsNone(self.get_context_variable('revision')) - - mock_api.api_revision.assert_called_once_with('426', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision(self, mock_api): - # given - stub_revision = { - 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'committer': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'type': 'git', - 'author': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'message': 'Linux 4.2-rc1\n', - 'synthetic': False, - 'directory_url': '/api/1/directory/' - '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', - 'parent_url': [ - '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' - ], - } - mock_api.api_revision.return_value = stub_revision - - expected_revision = { - 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'committer': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'type': 'git', - 'author': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'message': 'Linux 4.2-rc1\n', - 'synthetic': False, - 'parent_url': [ - '/browse/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' - ], - 'directory_url': '/browse/directory/2a1dbabeed4dcf1f4a4c441993b2f' - 'fc9d972780b/', - } - - # when - rv = 
self.client.get('/browse/revision/426/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - self.assertEqual(self.get_context_variable('revision'), - expected_revision) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_revision.assert_called_once_with('426', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_raw_message(self, mock_api): - # given - sha1 = 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754' - - # when - rv = self.client.get('/browse/revision/' - 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754/raw/') - - self.assertRedirects( - rv, '/api/1/revision/%s/raw/' % sha1) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log_ko_not_found(self, mock_api): - # given - mock_api.api_revision_log.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/revision/sha1/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('sha1_git'), 'sha1') - self.assertEqual( - self.get_context_variable('message'), - 'Not found!') - self.assertEqual(self.get_context_variable('revisions'), []) - - mock_api.api_revision_log.assert_called_once_with('sha1', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') - - # when - rv = self.client.get('/browse/revision/426/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - self.assertEqual( - self.get_context_variable('message'), - 'wrong input!') - self.assertEqual(self.get_context_variable('revisions'), []) - - mock_api.api_revision_log.assert_called_once_with('426', None) - - 
@patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log(self, mock_api): - # given - stub_revisions = { - 'revisions': [{ - 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'committer': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'type': 'git', - 'author': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'message': 'Linux 4.2-rc1\n', - 'synthetic': False, - 'directory_url': '/api/1/directory/' - '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', - 'parent_url': [ - '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' - ], - }], - 'next_revs_url': '/api/1/revision/1234/log/' - } - mock_api.api_revision_log.return_value = stub_revisions - - # when - rv = self.client.get('/browse/revision/426/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('sha1_git'), '426') - self.assertTrue( - isinstance(self.get_context_variable('revisions'), map)) - self.assertEqual( - self.get_context_variable('next_revs_url'), - '/browse/revision/1234/log/') - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_revision_log.assert_called_once_with('426', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log_by_ko_not_found(self, mock_api): - # given - mock_api.api_revision_log_by.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/revision/origin/9/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('origin_id'), 9) - self.assertEqual( - self.get_context_variable('message'), - 'Not found!') - self.assertEqual(self.get_context_variable('revisions'), []) - - mock_api.api_revision_log_by.assert_called_once_with( - 9, 
'refs/heads/master', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log_by_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_log.side_effect = BadInputExc('wrong input!') - - # when - rv = self.client.get('/browse/revision/abcd/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('sha1_git'), 'abcd') - self.assertEqual( - self.get_context_variable('message'), - 'wrong input!') - self.assertEqual(self.get_context_variable('revisions'), []) - - mock_api.api_revision_log.assert_called_once_with('abcd', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_log_by(self, mock_api): - # given - stub_revisions = [{ - 'id': 'd770e558e21961ad6cfdf0ff7df0eb5d7d4f0754', - 'date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'committer': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'committer_date': 'Sun, 05 Jul 2015 18:01:52 GMT', - 'type': 'git', - 'author': { - 'email': 'torvalds@linux-foundation.org', - 'name': 'Linus Torvalds' - }, - 'message': 'Linux 4.2-rc1\n', - 'synthetic': False, - 'directory_url': '/api/1/directory/' - '2a1dbabeed4dcf1f4a4c441993b2ffc9d972780b/', - 'parent_url': [ - '/api/1/revision/a585d2b738bfa26326b3f1f40f0f1eda0c067ccf/' - ], - }] - mock_api.api_revision_log_by.return_value = stub_revisions - - # when - rv = self.client.get('/browse/revision/origin/2/log/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-log.html') - self.assertEqual(self.get_context_variable('origin_id'), 2) - self.assertTrue( - isinstance(self.get_context_variable('revisions'), map)) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_revision_log_by.assert_called_once_with( - 2, 'refs/heads/master', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_ko_not_found(self, mock_api): - # 
given - mock_api.api_revision_history.side_effect = NotFoundExc( - 'Not found') - - # when - rv = self.client.get('/browse/revision/1/history/2/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '1') - self.assertEqual(self.get_context_variable('sha1_git'), '2') - self.assertEqual( - self.get_context_variable('message'), - 'Not found') - - mock_api.api_revision_history.assert_called_once_with( - '1', '2') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_history.side_effect = BadInputExc( - 'Input incorrect') - - # when - rv = self.client.get('/browse/revision/321/history/654/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '321') - self.assertEqual(self.get_context_variable('sha1_git'), '654') - self.assertEqual( - self.get_context_variable('message'), - 'Input incorrect') - - mock_api.api_revision_history.assert_called_once_with( - '321', '654') - - @istest - def browse_revision_history_ok_same_sha1(self): - # when - rv = self.client.get('/browse/revision/10/history/10/') - - # then - self.assertEqual(rv.status_code, 302) - - @patch('swh.web.ui.views.browse.utils') - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history(self, mock_api, mock_utils): - # given - stub_revision = {'id': 'some-rev'} - mock_api.api_revision_history.return_value = stub_revision - - expected_revision = { - 'id': 'some-rev-id', - 'author': {'name': 'foo', 'email': 'bar'}, - 'committer': {'name': 'foo', 'email': 'bar'} - } - mock_utils.prepare_data_for_view.return_value = expected_revision - - # when - rv = self.client.get('/browse/revision/426/history/789/') - - # then - self.assertEqual(rv.status_code, 200) - 
self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '426') - self.assertEqual(self.get_context_variable('sha1_git'), '789') - self.assertEqual(self.get_context_variable('revision'), - expected_revision) - - mock_api.api_revision_history.assert_called_once_with( - '426', '789') - mock_utils.prepare_data_for_view.assert_called_once_with(stub_revision) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_ko_not_found(self, mock_api): - # given - mock_api.api_revision_directory.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/revision/1/directory/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git'), '1') - self.assertEqual(self.get_context_variable('path'), '.') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual( - self.get_context_variable('message'), - "Not found!") - - mock_api.api_revision_directory.assert_called_once_with( - '1', None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_directory.side_effect = BadInputExc('Bad input!') - - # when - rv = self.client.get('/browse/revision/10/directory/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git'), '10') - self.assertEqual(self.get_context_variable('path'), '.') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual( - self.get_context_variable('message'), - "Bad input!") - - mock_api.api_revision_directory.assert_called_once_with( - '10', None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory(self, mock_api): - # given - stub_result0 = { - 'type': 'dir', - 
'revision': '100', - 'content': [ - { - 'id': 'some-result', - 'type': 'file', - 'name': 'blah', - }, - { - 'id': 'some-other-result', - 'type': 'dir', - 'name': 'foo', - } - ] - } - - mock_api.api_revision_directory.return_value = stub_result0 - - stub_result1 = { - 'type': 'dir', - 'revision': '100', - 'content': - [ - { - 'id': 'some-result', - 'type': 'file', - 'name': 'blah', - }, - { - 'id': 'some-other-result', - 'type': 'dir', - 'name': 'foo', - } - ] - } - - # when - rv = self.client.get('/browse/revision/100/directory/some/path/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git'), '100') - self.assertEqual(self.get_context_variable('revision'), '100') - self.assertEqual(self.get_context_variable('path'), 'some/path') - self.assertIsNone(self.get_context_variable('message')) - self.assertEqual(self.get_context_variable('result'), stub_result1) - - mock_api.api_revision_directory.assert_called_once_with( - '100', 'some/path', with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_directory_ko_not_found(self, mock_api): - # given - mock_api.api_revision_history_directory.side_effect = NotFoundExc( - 'not found') - - # when - rv = self.client.get('/browse/revision/123/history/456/directory/a/b/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '123') - self.assertEqual(self.get_context_variable('sha1_git'), '456') - self.assertEqual(self.get_context_variable('path'), 'a/b') - self.assertEqual(self.get_context_variable('message'), 'not found') - self.assertIsNone(self.get_context_variable('result')) - - mock_api.api_revision_history_directory.assert_called_once_with( - '123', '456', 'a/b', with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def 
browse_revision_history_directory_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_history_directory.side_effect = BadInputExc( - 'bad input') - - # when - rv = self.client.get('/browse/revision/123/history/456/directory/a/c/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '123') - self.assertEqual(self.get_context_variable('sha1_git'), '456') - self.assertEqual(self.get_context_variable('path'), 'a/c') - self.assertEqual(self.get_context_variable('message'), 'bad input') - self.assertIsNone(self.get_context_variable('result')) - - mock_api.api_revision_history_directory.assert_called_once_with( - '123', '456', 'a/c', with_data=True) - - @patch('swh.web.ui.views.browse.service') - @istest - def browse_revision_history_directory_ok_no_trailing_slash_so_redirect( - self, mock_service): - # when - rv = self.client.get('/browse/revision/1/history/2/directory/path/to') - - # then - self.assertEqual(rv.status_code, 301) - - @patch('swh.web.ui.views.browse.service') - @istest - def browse_revision_history_directory_ok_same_sha1_redirects( - self, mock_service): - # when - rv = self.client.get('/browse/revision/1/history/1/directory/path/to') - - # then - self.assertEqual(rv.status_code, 301) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_directory(self, mock_api): - # given - stub_result0 = { - 'type': 'dir', - 'revision': '1000', - 'content': [{ - 'id': 'some-result', - 'type': 'file', - 'name': 'blah' - }] - } - - mock_api.api_revision_history_directory.return_value = stub_result0 - - stub_result1 = { - 'type': 'dir', - 'revision': '1000', - 'content': [{ - 'id': 'some-result', - 'type': 'file', - 'name': 'blah' - }] - } - - # when - rv = self.client.get('/browse/revision/100/history/999/directory/' - 'path/to/') - - # then - self.assertEqual(rv.status_code, 200) - 
self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('sha1_git_root'), '100') - self.assertEqual(self.get_context_variable('sha1_git'), '999') - self.assertEqual(self.get_context_variable('revision'), '1000') - self.assertEqual(self.get_context_variable('path'), 'path/to') - self.assertIsNone(self.get_context_variable('message')) - self.assertEqual(self.get_context_variable('result'), stub_result1) - - mock_api.api_revision_history_directory.assert_called_once_with( - '100', '999', 'path/to', with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_through_origin_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_history_through_origin.side_effect = BadInputExc( - 'Problem input.') # noqa - - # when - rv = self.client.get('/browse/revision/origin/99' - '/history/123/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), - 'Problem input.') - - mock_api.api_revision_history_through_origin.assert_called_once_with( - 99, 'refs/heads/master', None, '123') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_through_origin_ko_not_found(self, mock_api): - # given - mock_api.api_revision_history_through_origin.side_effect = NotFoundExc( - 'Not found.') - - # when - rv = self.client.get('/browse/revision/origin/999/' - 'branch/dev/history/123/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), - 'Not found.') - - mock_api.api_revision_history_through_origin.assert_called_once_with( - 999, 'dev', None, '123') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_through_origin_ko_other_error(self, 
mock_api): - # given - mock_api.api_revision_history_through_origin.side_effect = ValueError( - 'Other Error.') - - # when - rv = self.client.get('/browse/revision/origin/438' - '/branch/scratch' - '/ts/2016' - '/history/789/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), - 'Other Error.') - - mock_api.api_revision_history_through_origin.assert_called_once_with( - 438, 'scratch', '2016', '789') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_history_through_origin(self, mock_api): - # given - stub_rev = { - 'id': 'some-id', - 'author': {}, - 'committer': {} - } - mock_api.api_revision_history_through_origin.return_value = stub_rev - - # when - rv = self.client.get('/browse/revision/origin/99/history/123/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('revision'), stub_rev) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_revision_history_through_origin.assert_called_once_with( - 99, 'refs/heads/master', None, '123') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_with_origin_ko_not_found(self, mock_api): - # given - mock_api.api_revision_with_origin.side_effect = NotFoundExc( - 'Not found') - - # when - rv = self.client.get('/browse/revision/origin/1/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), 'Not found') - - mock_api.api_revision_with_origin.assert_called_once_with( - 1, 'refs/heads/master', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_with_origin_ko_bad_input(self, mock_api): - # given - mock_api.api_revision_with_origin.side_effect = 
BadInputExc( - 'Bad Input') - - # when - rv = self.client.get('/browse/revision/origin/1000/branch/dev/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), 'Bad Input') - - mock_api.api_revision_with_origin.assert_called_once_with( - 1000, 'dev', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_with_origin_ko_other(self, mock_api): - # given - mock_api.api_revision_with_origin.side_effect = ValueError( - 'Other') - - # when - rv = self.client.get('/browse/revision/origin/1999' - '/branch/scratch/master' - '/ts/1990-01-10/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertIsNone(self.get_context_variable('revision')) - self.assertEqual(self.get_context_variable('message'), 'Other') - - mock_api.api_revision_with_origin.assert_called_once_with( - 1999, 'scratch/master', '1990-01-10') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_with_origin(self, mock_api): - # given - stub_rev = {'id': 'some-id', - 'author': {}, - 'committer': {}} - mock_api.api_revision_with_origin.return_value = stub_rev - - # when - rv = self.client.get('/browse/revision/origin/1/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision.html') - self.assertEqual(self.get_context_variable('revision'), stub_rev) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_revision_with_origin.assert_called_once_with( - 1, 'refs/heads/master', None) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_through_origin_ko_not_found(self, mock_api): - # given - mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa - 'this is not the robot you are looking for') - - # when - rv = self.client.get('/browse/revision/origin/2' - '/directory/') - - 
self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual(self.get_context_variable('message'), - 'this is not the robot you are looking for') - - mock_api.api_directory_through_revision_origin.assert_called_once_with( # noqa - 2, 'refs/heads/master', None, None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_through_origin_ko_bad_input(self, mock_api): - # given - mock_api.api_directory_through_revision_origin.side_effect = BadInputExc( # noqa - 'Bad Robot') - - # when - rv = self.client.get('/browse/revision/origin/2' - '/directory/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual(self.get_context_variable('message'), 'Bad Robot') - - mock_api.api_directory_through_revision_origin.assert_called_once_with( - 2, 'refs/heads/master', None, None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_through_origin_ko_other(self, mock_api): - # given - mock_api.api_directory_through_revision_origin.side_effect = ValueError( # noqa - 'Other bad stuff') - - # when - rv = self.client.get('/browse/revision/origin/2' - '/directory/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual(self.get_context_variable('message'), - 'Other bad stuff') - - mock_api.api_directory_through_revision_origin.assert_called_once_with( - 2, 'refs/heads/master', None, None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_revision_directory_through_origin(self, mock_api): - # given - stub_res = {'id': 'some-id', - 'revision': 'some-rev-id', - 'type': 'dir', - 'content': 'some-content'} - 
mock_api.api_directory_through_revision_origin.return_value = stub_res - - # when - rv = self.client.get('/browse/revision/origin/2' - '/branch/dev' - '/ts/2013-20-20 10:02' - '/directory/some/file/') - - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('result'), stub_res) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_directory_through_revision_origin.assert_called_once_with( - 2, 'dev', '2013-20-20 10:02', 'some/file', with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_through_revision_with_origin_history_ko_not_found( - self, mock_api): - mock_api.api_directory_through_revision_with_origin_history.side_effect = NotFoundExc( # noqa - 'Not found!') - - # when - rv = self.client.get('/browse/revision/origin/987' - '/history/sha1git' - '/directory/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual(self.get_context_variable('message'), 'Not found!') - self.assertEqual(self.get_context_variable('path'), '.') - - mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa - 987, 'refs/heads/master', None, 'sha1git', None, with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_through_revision_with_origin_history_ko_bad_input( - self, mock_api): - mock_api.api_directory_through_revision_with_origin_history.side_effect = BadInputExc( # noqa - 'Bad input! 
Bleh!') - - # when - rv = self.client.get('/browse/revision/origin/798' - '/branch/refs/heads/dev' - '/ts/2012-11-11' - '/history/1234' - '/directory/some/path/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertIsNone(self.get_context_variable('result')) - self.assertEqual(self.get_context_variable('message'), - 'Bad input! Bleh!') - self.assertEqual(self.get_context_variable('path'), 'some/path') - - mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa - 798, 'refs/heads/dev', '2012-11-11', '1234', 'some/path', - with_data=True) - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_directory_through_revision_with_origin_history( - self, mock_api): - stub_dir = {'type': 'dir', - 'content': [], - 'revision': 'specific-rev-id'} - mock_api.api_directory_through_revision_with_origin_history.return_value = stub_dir # noqa - - # when - rv = self.client.get('/browse/revision/origin/101010' - '/ts/1955-11-12' - '/history/54628' - '/directory/emacs-24.5/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('revision-directory.html') - self.assertEqual(self.get_context_variable('result'), stub_dir) - self.assertIsNone(self.get_context_variable('message')) - self.assertEqual(self.get_context_variable('path'), 'emacs-24.5') - - mock_api.api_directory_through_revision_with_origin_history.assert_called_once_with( # noqa - 101010, 'refs/heads/master', '1955-11-12', '54628', 'emacs-24.5', - with_data=True) - - -class EntityView(test_app.SWHViewTestCase): - render_template = False - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_entity_ko_not_found(self, mock_api): - # given - mock_api.api_entity_by_uuid.side_effect = NotFoundExc('Not found!') - - # when - rv = self.client.get('/browse/entity/' - '5f4d4c51-498a-4e28-88b3-b3e4e8396cba/') - - # then - self.assertEqual(rv.status_code, 200) - 
self.assert_template_used('entity.html') - self.assertEqual(self.get_context_variable('entities'), []) - self.assertEqual(self.get_context_variable('message'), 'Not found!') - - mock_api.api_entity_by_uuid.assert_called_once_with( - '5f4d4c51-498a-4e28-88b3-b3e4e8396cba') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_entity_ko_bad_input(self, mock_api): - # given - mock_api.api_entity_by_uuid.side_effect = BadInputExc('wrong input!') - - # when - rv = self.client.get('/browse/entity/blah-blah-uuid/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('entity.html') - self.assertEqual(self.get_context_variable('entities'), []) - self.assertEqual(self.get_context_variable('message'), 'wrong input!') - - mock_api.api_entity_by_uuid.assert_called_once_with( - 'blah-blah-uuid') - - @patch('swh.web.ui.views.browse.api') - @istest - def browse_entity(self, mock_api): - # given - stub_entities = [ - {'id': '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba'}] - mock_api.api_entity_by_uuid.return_value = stub_entities - - # when - rv = self.client.get('/browse/entity/' - '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba/') - - # then - self.assertEqual(rv.status_code, 200) - self.assert_template_used('entity.html') - self.assertEqual(self.get_context_variable('entities'), stub_entities) - self.assertIsNone(self.get_context_variable('message')) - - mock_api.api_entity_by_uuid.assert_called_once_with( - '5f4d4c51-5a9b-4e28-88b3-b3e4e8396cba') - - -class Lookup(TestCase): - @patch('swh.web.ui.views.browse.api') - @istest - def api_lookup(self, mock_api): - # given - mock_api.api_content_metadata.return_value = {'id': 'blah'} - - # given - r = browse.api_lookup(mock_api.api_content_metadata, 'sha1:blah') - - # then - self.assertEquals(r, {'id': 'blah'}) - mock_api.api_content_metadata.assert_called_once_with('sha1:blah') - - @patch('swh.web.ui.views.browse.api') - @istest - def api_lookup_not_found(self, mock_api): - # given - 
mock_api.api_content_filetype.side_effect = NotFoundExc - - # given - r = browse.api_lookup(mock_api.api_content_filetype, 'sha1_git:foo') - - # then - self.assertIsNone(r) - mock_api.api_content_filetype.assert_called_once_with('sha1_git:foo') - - @patch('swh.web.ui.views.browse.api') - @istest - def api_lookup_bad_input(self, mock_api): - # given - mock_api.api_content_license.side_effect = BadInputExc - - # given - r = browse.api_lookup(mock_api.api_content_license, 'sha1_git:foo') - - # then - self.assertIsNone(r) - mock_api.api_content_license.assert_called_once_with('sha1_git:foo') diff --git a/swh/web/ui/tests/views/test_main.py b/swh/web/ui/tests/views/test_main.py deleted file mode 100644 index 3d9af0cb..00000000 --- a/swh/web/ui/tests/views/test_main.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2016 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -from nose.tools import istest - -from .. 
import test_app - - -class MainViewTestCase(test_app.SWHViewTestCase): - render_template = False - - @istest - def homepage(self): - # when - rv = self.client.get('/') - - # then - self.assertEquals(rv.status_code, 302) - self.assertRedirects(rv, '/api/') - - # @istest - # def info(self): - # # when - # rv = self.client.get('/about/') - - # self.assertEquals(rv.status_code, 200) - # self.assert_template_used('about.html') - # self.assertIn(b'About', rv.data) diff --git a/swh/web/ui/views/__init__.py b/swh/web/ui/views/__init__.py deleted file mode 100644 index db406c07..00000000 --- a/swh/web/ui/views/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2016 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -from . import main, api, browse, errorhandler # noqa: only for side effects diff --git a/swh/web/ui/views/api.py b/swh/web/ui/views/api.py deleted file mode 100644 index 221ced83..00000000 --- a/swh/web/ui/views/api.py +++ /dev/null @@ -1,1107 +0,0 @@ -# Copyright (C) 2015-2017 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import functools - -from types import GeneratorType - -from flask import render_template, request, url_for - -from swh.web.ui import service, utils, apidoc as doc -from swh.web.ui.exc import NotFoundExc, ForbiddenExc -from swh.web.ui.main import app - - -# canned doc string snippets that are used in several doc strings - -_doc_arg_content_id = """A "[hash_type:]hash" content identifier, where - hash_type is one of "sha1" (the default), "sha1_git", "sha256", and hash is - a checksum obtained with the hash_type hashing algorithm.""" -_doc_arg_last_elt = 'element to 
start listing from, for pagination purposes' -_doc_arg_per_page = 'number of elements to list, for pagination purposes' - -_doc_exc_bad_id = 'syntax error in the given identifier(s)' -_doc_exc_id_not_found = 'no object matching the given criteria could be found' - -_doc_ret_revision_meta = 'metadata of the revision identified by sha1_git' -_doc_ret_revision_log = """list of dictionaries representing the metadata of - each revision found in the commit log heading to revision sha1_git. - For each commit at least the following information are returned: - author/committer, authoring/commit timestamps, revision id, commit message, - parent (i.e., immediately preceding) commits, "root" directory id.""" - -_doc_header_link = """indicates that a subsequent result page is available, - pointing to it""" - - -def _api_lookup(lookup_fn, *args, - notfound_msg='Object not found', - enrich_fn=lambda x: x): - """Capture a redundant behavior of: - - looking up the backend with a criteria (be it an identifier or checksum) - passed to the function lookup_fn - - if nothing is found, raise an NotFoundExc exception with error - message notfound_msg. - - Otherwise if something is returned: - - either as list, map or generator, map the enrich_fn function to it - and return the resulting data structure as list. - - either as dict and pass to enrich_fn and return the dict enriched. - - Args: - - criteria: discriminating criteria to lookup - - lookup_fn: function expects one criteria and optional supplementary - *args. - - notfound_msg: if nothing matching the criteria is found, - raise NotFoundExc with this error message. - - enrich_fn: Function to use to enrich the result returned by - lookup_fn. Default to the identity function if not provided. - - *args: supplementary arguments to pass to lookup_fn. - - Raises: - NotFoundExp or whatever `lookup_fn` raises. 
- - """ - res = lookup_fn(*args) - if not res: - raise NotFoundExc(notfound_msg) - if isinstance(res, (map, list, GeneratorType)): - return [enrich_fn(x) for x in res] - return enrich_fn(res) - - -@app.route('/api/1/') -def api_endpoints(): - """Display the list of opened api endpoints. - - """ - routes = doc.APIUrls.get_app_endpoints() - # Return a list of routes with consistent ordering - env = { - 'doc_routes': sorted(routes.items()) - } - return render_template('api-endpoints.html', **env) - - -@app.route('/api/') -def api_doc(): - """Display the API's documentation. - - """ - return render_template('api.html') - - -@app.route('/api/1/stat/counters/') -@doc.route('/api/1/stat/counters/', noargs=True) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""dictionary mapping object types to the amount of - corresponding objects currently available in the archive""") -def api_stats(): - """Get statistics about the content of the archive. - - """ - return service.stat_counters() - - -@app.route('/api/1/origin//visits/') -@doc.route('/api/1/origin/visits/') -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc='software origin identifier') -@doc.header('Link', doc=_doc_header_link) -@doc.param('last_visit', default=None, - argtype=doc.argtypes.int, - doc=_doc_arg_last_elt) -@doc.param('per_page', default=10, - argtype=doc.argtypes.int, - doc=_doc_arg_per_page) -@doc.returns(rettype=doc.rettypes.list, - retdoc="""a list of dictionaries describing individual visits. - For each visit, its identifier, timestamp (as UNIX time), outcome, - and visit-specific URL for more information are given.""") -def api_origin_visits(origin_id): - """Get information about all visits of a given software origin. 
- - """ - result = {} - per_page = int(request.args.get('per_page', '10')) - last_visit = request.args.get('last_visit') - if last_visit: - last_visit = int(last_visit) - - def _lookup_origin_visits( - origin_id, last_visit=last_visit, per_page=per_page): - return service.lookup_origin_visits( - origin_id, last_visit=last_visit, per_page=per_page) - - def _enrich_origin_visit(origin_visit): - ov = origin_visit.copy() - ov['origin_visit_url'] = url_for('api_origin_visit', - origin_id=ov['origin'], - visit_id=ov['visit']) - return ov - - r = _api_lookup( - _lookup_origin_visits, origin_id, - notfound_msg='No origin {} found'.format(origin_id), - enrich_fn=_enrich_origin_visit) - - if r: - l = len(r) - if l == per_page: - new_last_visit = r[-1]['visit'] - params = { - 'origin_id': origin_id, - 'last_visit': new_last_visit - } - - if request.args.get('per_page'): - params['per_page'] = per_page - - result['headers'] = { - 'link-next': url_for('api_origin_visits', **params) - } - - result.update({ - 'results': r - }) - - return result - - -@app.route('/api/1/origin//visit//') -@doc.route('/api/1/origin/visit/') -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc='software origin identifier') -@doc.arg('visit_id', - default=1, - argtype=doc.argtypes.int, - argdoc="""visit identifier, relative to the origin identified by - origin_id""") -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""dictionary containing both metadata for the entire - visit (e.g., timestamp as UNIX time, visit outcome, etc.) and what - was at the software origin during the visit (i.e., a mapping from - branches to other archive objects)""") -def api_origin_visit(origin_id, visit_id): - """Get information about a specific visit of a software origin. 
- - """ - def _enrich_origin_visit(origin_visit): - ov = origin_visit.copy() - ov['origin_url'] = url_for('api_origin', origin_id=ov['origin']) - if 'occurrences' in ov: - ov['occurrences'] = { - k: utils.enrich_object(v) - for k, v in ov['occurrences'].items() - } - return ov - - return _api_lookup( - service.lookup_origin_visit, origin_id, visit_id, - notfound_msg=('No visit {} for origin {} found' - .format(visit_id, origin_id)), - enrich_fn=_enrich_origin_visit) - - -@app.route('/api/1/content/symbol/', methods=['POST']) -@app.route('/api/1/content/symbol//') -@doc.route('/api/1/content/symbol/', tags=['upcoming']) -@doc.arg('q', - default='hello', - argtype=doc.argtypes.str, - argdoc="""An expression string to lookup in swh's raw content""") -@doc.header('Link', doc=_doc_header_link) -@doc.param('last_sha1', default=None, - argtype=doc.argtypes.str, - doc=_doc_arg_last_elt) -@doc.param('per_page', default=10, - argtype=doc.argtypes.int, - doc=_doc_arg_per_page) -@doc.returns(rettype=doc.rettypes.list, - retdoc="""A list of dict whose content matches the expression. - Each dict has the following keys: - - id (bytes): identifier of the content - - name (text): symbol whose content match the expression - - kind (text): kind of the symbol that matched - - lang (text): Language for that entry - - line (int): Number line for the symbol - - """) -def api_content_symbol(q=None): - """Search content objects by `Ctags `_-style - symbol (e.g., function name, data type, method, ...). 
- - """ - result = {} - last_sha1 = request.args.get('last_sha1', None) - per_page = int(request.args.get('per_page', '10')) - - def lookup_exp(exp, last_sha1=last_sha1, per_page=per_page): - return service.lookup_expression(exp, last_sha1, per_page) - - symbols = _api_lookup( - lookup_exp, q, - notfound_msg="No indexed raw content match expression '{}'.".format(q), - enrich_fn=functools.partial(utils.enrich_content, top_url=True)) - - if symbols: - l = len(symbols) - - if l == per_page: - new_last_sha1 = symbols[-1]['sha1'] - params = { - 'q': q, - 'last_sha1': new_last_sha1, - } - - if request.args.get('per_page'): - params['per_page'] = per_page - - result['headers'] = { - 'link-next': url_for('api_content_symbol', **params), - } - - result.update({ - 'results': symbols - }) - - return result - - -@app.route('/api/1/content/known/', methods=['POST']) -@app.route('/api/1/content/known//') -@doc.route('/api/1/content/known/', tags=['hidden']) -@doc.arg('q', - default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', - argtype=doc.argtypes.sha1, - argdoc='content identifier as a sha1 checksum') -# @doc.param('q', default=None, -# argtype=doc.argtypes.str, -# doc="""(POST request) An algo_hash:hash string, where algo_hash -# is one of sha1, sha1_git or sha256 and hash is the hash to -# search for in SWH""") -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""a dictionary with results (found/not found for each given - identifier) and statistics about how many identifiers were - found""") -def api_check_content_known(q=None): - """Check whether some content (AKA "blob") is present in the archive. 
- - Lookup can be performed by various means: - - - a GET request with one or several hashes, separated by ',' - - a POST request with one or several hashes, passed as (multiple) values - for parameter 'q' - - """ - response = {'search_res': None, - 'search_stats': None} - search_stats = {'nbfiles': 0, 'pct': 0} - search_res = None - - queries = [] - # GET: Many hash separated values request - if q: - hashes = q.split(',') - for v in hashes: - queries.append({'filename': None, 'sha1': v}) - - # POST: Many hash requests in post form submission - elif request.method == 'POST': - data = request.form - # Remove potential inputs with no associated value - for k, v in data.items(): - if v is not None: - if k == 'q' and len(v) > 0: - queries.append({'filename': None, 'sha1': v}) - elif v != '': - queries.append({'filename': k, 'sha1': v}) - - if queries: - lookup = service.lookup_multiple_hashes(queries) - result = [] - l = len(queries) - for el in lookup: - res_d = {'sha1': el['sha1'], - 'found': el['found']} - if 'filename' in el and el['filename']: - res_d['filename'] = el['filename'] - result.append(res_d) - search_res = result - nbfound = len([x for x in lookup if x['found']]) - search_stats['nbfiles'] = l - search_stats['pct'] = (nbfound / l) * 100 - - response['search_res'] = search_res - response['search_stats'] = search_stats - return response - - -@app.route('/api/1/origin//') -@app.route('/api/1/origin//url/') -@doc.route('/api/1/origin/') -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc='origin identifier (when looking up by ID)') -@doc.arg('origin_type', - default='git', - argtype=doc.argtypes.str, - argdoc='origin type (when looking up by type+URL)') -@doc.arg('origin_url', - default='https://github.com/hylang/hy', - argtype=doc.argtypes.path, - argdoc='origin URL (when looking up by type+URL') -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""The metadata of the origin 
corresponding to the given - criteria""") -def api_origin(origin_id=None, origin_type=None, origin_url=None): - """Get information about a software origin. - - Software origins might be looked up by origin type and canonical URL (e.g., - "git" + a "git clone" URL), or by their unique (but otherwise meaningless) - identifier. - - """ - ori_dict = { - 'id': origin_id, - 'type': origin_type, - 'url': origin_url - } - ori_dict = {k: v for k, v in ori_dict.items() if ori_dict[k]} - if 'id' in ori_dict: - error_msg = 'Origin with id %s not found.' % ori_dict['id'] - else: - error_msg = 'Origin with type %s and URL %s not found' % ( - ori_dict['type'], ori_dict['url']) - - def _enrich_origin(origin): - if 'id' in origin: - o = origin.copy() - o['origin_visits_url'] = url_for('api_origin_visits', - origin_id=o['id']) - return o - - return origin - - return _api_lookup( - service.lookup_origin, ori_dict, - notfound_msg=error_msg, - enrich_fn=_enrich_origin) - - -@app.route('/api/1/person//') -@doc.route('/api/1/person/') -@doc.arg('person_id', - default=42, - argtype=doc.argtypes.int, - argdoc='person identifier') -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc='The metadata of the person identified by person_id') -def api_person(person_id): - """Get information about a person. - - """ - return _api_lookup( - service.lookup_person, person_id, - notfound_msg='Person with id {} not found.'.format(person_id)) - - -@app.route('/api/1/release//') -@doc.route('/api/1/release/') -@doc.arg('sha1_git', - default='7045404f3d1c54e6473c71bbb716529fbad4be24', - argtype=doc.argtypes.sha1_git, - argdoc='release identifier') -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc='The metadata of the release identified by sha1_git') -def api_release(sha1_git): - """Get information about a release. 
- - Releases are identified by SHA1 checksums, compatible with Git tag - identifiers. See ``release_identifier`` in our `data model module - `_ - for details about how they are computed. - - """ - error_msg = 'Release with sha1_git %s not found.' % sha1_git - return _api_lookup( - service.lookup_release, sha1_git, - notfound_msg=error_msg, - enrich_fn=utils.enrich_release) - - -def _revision_directory_by(revision, path, request_path, - limit=100, with_data=False): - """Compute the revision matching criterion's directory or content data. - - Args: - revision: dictionary of criterions representing a revision to lookup - path: directory's path to lookup - request_path: request path which holds the original context to - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - with_data: indicate to retrieve the content's raw data if path resolves - to a content. 
- - """ - def enrich_directory_local(dir, context_url=request_path): - return utils.enrich_directory(dir, context_url) - - rev_id, result = service.lookup_directory_through_revision( - revision, path, limit=limit, with_data=with_data) - - content = result['content'] - if result['type'] == 'dir': # dir_entries - result['content'] = list(map(enrich_directory_local, content)) - else: # content - result['content'] = utils.enrich_content(content) - - return result - - -@app.route('/api/1/revision' - '/origin/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/directory//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/directory//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts/' - '/directory/') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts/' - '/directory//') -@doc.route('/api/1/revision/origin/directory/', tags=['hidden']) -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc="The revision's origin's SWH identifier") -@doc.arg('branch_name', - default='refs/heads/master', - argtype=doc.argtypes.path, - argdoc="""The optional branch for the given origin (default - to master""") -@doc.arg('ts', - default='2000-01-17T11:23:54+00:00', - argtype=doc.argtypes.ts, - argdoc="""Optional timestamp (default to the nearest time - crawl of timestamp)""") -@doc.arg('path', - default='Dockerfile', - argtype=doc.argtypes.path, - argdoc='The path to the directory or file to display') -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""The metadata of the revision corresponding to the - given criteria""") -def api_directory_through_revision_origin(origin_id, - branch_name="refs/heads/master", - ts=None, - path=None, - with_data=False): - """Display directory or content information through a revision identified - by origin/branch/timestamp. 
- """ - if ts: - ts = utils.parse_timestamp(ts) - - return _revision_directory_by( - { - 'origin_id': origin_id, - 'branch_name': branch_name, - 'ts': ts - }, - path, - request.path, - with_data=with_data) - - -@app.route('/api/1/revision' - '/origin//') -@app.route('/api/1/revision' - '/origin/' - '/branch//') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts//') -@app.route('/api/1/revision' - '/origin/' - '/ts//') -@doc.route('/api/1/revision/origin/') -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc='software origin identifier') -@doc.arg('branch_name', - default='refs/heads/master', - argtype=doc.argtypes.path, - argdoc="""(optional) fully-qualified branch name, e.g., - "refs/heads/master". Defaults to the master branch.""") -@doc.arg('ts', - default=None, - argtype=doc.argtypes.ts, - argdoc="""(optional) timestamp close to which the revision pointed by - the given branch should be looked up. Defaults to now.""") -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, retdoc=_doc_ret_revision_meta) -def api_revision_with_origin(origin_id, - branch_name="refs/heads/master", - ts=None): - """Get information about a revision, searching for it based on software - origin, branch name, and/or visit timestamp. - - This endpoint behaves like ``/revision``, but operates on the revision that - has been found at a given software origin, close to a given point in time, - pointed by a given branch. 
- - """ - ts = utils.parse_timestamp(ts) - return _api_lookup( - service.lookup_revision_by, origin_id, branch_name, ts, - notfound_msg=('Revision with (origin_id: {}, branch_name: {}' - ', ts: {}) not found.'.format(origin_id, - branch_name, ts)), - enrich_fn=utils.enrich_revision) - - -@app.route('/api/1/revision//prev//') -@doc.route('/api/1/revision/prev/', tags=['hidden']) -@doc.arg('sha1_git', - default='ec72c666fb345ea5f21359b7bc063710ce558e39', - argtype=doc.argtypes.sha1_git, - argdoc="The revision's sha1_git identifier") -@doc.arg('context', - default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a', - argtype=doc.argtypes.path, - argdoc='The navigation breadcrumbs -- use at your own risk') -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc='The metadata of the revision identified by sha1_git') -def api_revision_with_context(sha1_git, context): - """Return information about revision with id sha1_git. - """ - def _enrich_revision(revision, context=context): - return utils.enrich_revision(revision, context) - - return _api_lookup( - service.lookup_revision, sha1_git, - notfound_msg='Revision with sha1_git %s not found.' % sha1_git, - enrich_fn=_enrich_revision) - - -@app.route('/api/1/revision//') -@doc.route('/api/1/revision/') -@doc.arg('sha1_git', - default='aafb16d69fd30ff58afdd69036a26047f3aebdc6', - argtype=doc.argtypes.sha1_git, - argdoc="revision identifier") -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, retdoc=_doc_ret_revision_meta) -def api_revision(sha1_git): - """Get information about a revision. - - Revisions are identified by SHA1 checksums, compatible with Git commit - identifiers. See ``revision_identifier`` in our `data model module - `_ - for details about how they are computed. 
- - """ - return _api_lookup( - service.lookup_revision, sha1_git, - notfound_msg='Revision with sha1_git {} not found.'.format(sha1_git), - enrich_fn=utils.enrich_revision) - - -@app.route('/api/1/revision//raw/') -@doc.route('/api/1/revision/raw/', tags=['hidden']) -@doc.arg('sha1_git', - default='ec72c666fb345ea5f21359b7bc063710ce558e39', - argtype=doc.argtypes.sha1_git, - argdoc="The queried revision's sha1_git identifier") -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.octet_stream, - retdoc="""The message of the revision identified by sha1_git - as a downloadable octet stream""") -def api_revision_raw_message(sha1_git): - """Return the raw data of the message of revision identified by sha1_git - """ - raw = service.lookup_revision_message(sha1_git) - return app.response_class(raw['message'], - headers={'Content-disposition': 'attachment;' - 'filename=rev_%s_raw' % sha1_git}, - mimetype='application/octet-stream') - - -@app.route('/api/1/revision//directory/') -@app.route('/api/1/revision//directory//') -@doc.route('/api/1/revision/directory/') -@doc.arg('sha1_git', - default='ec72c666fb345ea5f21359b7bc063710ce558e39', - argtype=doc.argtypes.sha1_git, - argdoc='revision identifier') -@doc.arg('dir_path', - default='Documentation/BUG-HUNTING', - argtype=doc.argtypes.path, - argdoc="""path relative to the root directory of revision identifier by - sha1_git""") -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""either a list of directory entries with their metadata, - or the metadata of a single directory entry""") -def api_revision_directory(sha1_git, - dir_path=None, - with_data=False): - """Get information about directory (entry) objects associated to revisions. - - Each revision is associated to a single "root" directory. 
This endpoint - behaves like ``/directory/``, but operates on the root directory associated - to a given revision. - - """ - return _revision_directory_by( - { - 'sha1_git': sha1_git - }, - dir_path, - request.path, - with_data=with_data) - - -@app.route('/api/1/revision//log/') -@app.route('/api/1/revision//prev//log/') -@doc.route('/api/1/revision/log/') -@doc.arg('sha1_git', - default='37fc9e08d0c4b71807a4f1ecb06112e78d91c283', - argtype=doc.argtypes.sha1_git, - argdoc='revision identifier') -# @doc.arg('prev_sha1s', -# default='6adc4a22f20bbf3bbc754f1ec8c82be5dfb5c71a', -# argtype=doc.argtypes.path, -# argdoc="""(Optional) Navigation breadcrumbs (descendant revisions -# previously visited). If multiple values, use / as delimiter. """) -@doc.header('Link', doc=_doc_header_link) -@doc.param('per_page', default=10, - argtype=doc.argtypes.int, - doc=_doc_arg_per_page) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, retdoc=_doc_ret_revision_log) -def api_revision_log(sha1_git, prev_sha1s=None): - """Get a list of all revisions heading to a given one, i.e., show the - commit log. - - """ - result = {} - per_page = int(request.args.get('per_page', '10')) - - def lookup_revision_log_with_limit(s, limit=per_page+1): - return service.lookup_revision_log(s, limit) - - error_msg = 'Revision with sha1_git %s not found.' 
% sha1_git - rev_get = _api_lookup(lookup_revision_log_with_limit, sha1_git, - notfound_msg=error_msg, - enrich_fn=utils.enrich_revision) - - l = len(rev_get) - if l == per_page+1: - rev_backward = rev_get[:-1] - new_last_sha1 = rev_get[-1]['id'] - params = { - 'sha1_git': new_last_sha1, - } - - if request.args.get('per_page'): - params['per_page'] = per_page - - result['headers'] = { - 'link-next': url_for('api_revision_log', **params) - } - - else: - rev_backward = rev_get - - if not prev_sha1s: # no nav breadcrumbs, so we're done - revisions = rev_backward - - else: - rev_forward_ids = prev_sha1s.split('/') - rev_forward = _api_lookup( - service.lookup_revision_multiple, rev_forward_ids, - notfound_msg=error_msg, - enrich_fn=utils.enrich_revision) - revisions = rev_forward + rev_backward - - result.update({ - 'results': revisions - }) - return result - - -@app.route('/api/1/revision' - '/origin//log/') -@app.route('/api/1/revision' - '/origin/' - '/branch//log/') -@app.route('/api/1/revision' - '/origin/' - '/branch/' - '/ts//log/') -@app.route('/api/1/revision' - '/origin/' - '/ts//log/') -@doc.route('/api/1/revision/origin/log/') -@doc.arg('origin_id', - default=1, - argtype=doc.argtypes.int, - argdoc="The revision's SWH origin identifier") -@doc.arg('branch_name', - default='refs/heads/master', - argtype=doc.argtypes.path, - argdoc="""(Optional) The revision's branch name within the origin specified. 
-Defaults to 'refs/heads/master'.""") -@doc.arg('ts', - default='2000-01-17T11:23:54+00:00', - argtype=doc.argtypes.ts, - argdoc="""(Optional) A time or timestamp string to parse""") -@doc.header('Link', doc=_doc_header_link) -@doc.param('per_page', default=10, - argtype=doc.argtypes.int, - doc=_doc_arg_per_page) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, retdoc=_doc_ret_revision_log) -def api_revision_log_by(origin_id, - branch_name='refs/heads/master', - ts=None): - """Show the commit log for a revision, searching for it based on software - origin, branch name, and/or visit timestamp. - - This endpoint behaves like ``/log``, but operates on the revision that - has been found at a given software origin, close to a given point in time, - pointed by a given branch. - """ - result = {} - per_page = int(request.args.get('per_page', '10')) - - if ts: - ts = utils.parse_timestamp(ts) - - def lookup_revision_log_by_with_limit(o_id, br, ts, limit=per_page+1): - return service.lookup_revision_log_by(o_id, br, ts, limit) - - error_msg = 'No revision matching origin %s ' % origin_id - error_msg += ', branch name %s' % branch_name - error_msg += (' and time stamp %s.' % ts) if ts else '.' 
- - rev_get = _api_lookup( - lookup_revision_log_by_with_limit, origin_id, branch_name, ts, - notfound_msg=error_msg, - enrich_fn=utils.enrich_revision) - l = len(rev_get) - if l == per_page+1: - revisions = rev_get[:-1] - last_sha1_git = rev_get[-1]['id'] - - params = { - 'origin_id': origin_id, - 'branch_name': branch_name, - 'ts': ts, - 'sha1_git': last_sha1_git, - } - - if request.args.get('per_page'): - params['per_page'] = per_page - - result['headers'] = { - 'link-next': url_for('api_revision_log_by', **params), - } - - else: - revisions = rev_get - - result.update({'results': revisions}) - - return result - - -@app.route('/api/1/directory//') -@app.route('/api/1/directory///') -@doc.route('/api/1/directory/') -@doc.arg('sha1_git', - default='1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8', - argtype=doc.argtypes.sha1_git, - argdoc='directory identifier') -@doc.arg('path', - default='codec/demux', - argtype=doc.argtypes.path, - argdoc='path relative to directory identified by sha1_git') -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""either a list of directory entries with their metadata, - or the metadata of a single directory entry""") -def api_directory(sha1_git, - path=None): - """Get information about directory or directory entry objects. - - Directories are identified by SHA1 checksums, compatible with Git directory - identifiers. See ``directory_identifier`` in our `data model module - `_ - for details about how they are computed. - - When given only a directory identifier, this endpoint returns information - about the directory itself, returning its content (usually a list of - directory entries). When given a directory identifier and a path, this - endpoint returns information about the directory entry pointed by the - relative path, starting path resolution from the given directory. 
- - """ - if path: - error_msg_path = ('Entry with path %s relative to directory ' - 'with sha1_git %s not found.') % (path, sha1_git) - return _api_lookup( - service.lookup_directory_with_path, sha1_git, path, - notfound_msg=error_msg_path, - enrich_fn=utils.enrich_directory) - else: - error_msg_nopath = 'Directory with sha1_git %s not found.' % sha1_git - return _api_lookup( - service.lookup_directory, sha1_git, - notfound_msg=error_msg_nopath, - enrich_fn=utils.enrich_directory) - - -@app.route('/api/1/content//provenance/') -@doc.route('/api/1/content/provenance/', tags=['hidden']) -@doc.arg('q', - default='sha1_git:88b9b366facda0b5ff8d8640ee9279bed346f242', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""List of provenance information (dict) for the matched -content.""") -def api_content_provenance(q): - """Return content's provenance information if any. 
- - """ - def _enrich_revision(provenance): - p = provenance.copy() - p['revision_url'] = url_for('api_revision', - sha1_git=provenance['revision']) - p['content_url'] = url_for('api_content_metadata', - q='sha1_git:%s' % provenance['content']) - p['origin_url'] = url_for('api_origin', - origin_id=provenance['origin']) - p['origin_visits_url'] = url_for('api_origin_visits', - origin_id=provenance['origin']) - p['origin_visit_url'] = url_for('api_origin_visit', - origin_id=provenance['origin'], - visit_id=provenance['visit']) - return p - - return _api_lookup( - service.lookup_content_provenance, q, - notfound_msg='Content with {} not found.'.format(q), - enrich_fn=_enrich_revision) - - -@app.route('/api/1/content//filetype/') -@doc.route('/api/1/content/filetype/', tags=['upcoming']) -@doc.arg('q', - default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""Filetype information (dict) for the matched -content.""") -def api_content_filetype(q): - """Get information about the detected MIME type of a content object. 
- - """ - return _api_lookup( - service.lookup_content_filetype, q, - notfound_msg='No filetype information found for content {}.'.format(q), - enrich_fn=utils.enrich_metadata_endpoint) - - -@app.route('/api/1/content//language/') -@doc.route('/api/1/content/language/', tags=['upcoming']) -@doc.arg('q', - default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""Language information (dict) for the matched -content.""") -def api_content_language(q): - """Get information about the detected (programming) language of a content - object. - - """ - return _api_lookup( - service.lookup_content_language, q, - notfound_msg='No language information found for content {}.'.format(q), - enrich_fn=utils.enrich_metadata_endpoint) - - -@app.route('/api/1/content//license/') -@doc.route('/api/1/content/license/', tags=['upcoming']) -@doc.arg('q', - default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""License information (dict) for the matched -content.""") -def api_content_license(q): - """Get information about the detected license of a content object. 
- - """ - return _api_lookup( - service.lookup_content_license, q, - notfound_msg='No license information found for content {}.'.format(q), - enrich_fn=utils.enrich_metadata_endpoint) - - -@app.route('/api/1/content//ctags/') -@doc.route('/api/1/content/ctags/', tags=['upcoming']) -@doc.arg('q', - default='sha1:1fc6129a692e7a87b5450e2ba56e7669d0c5775d', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""Ctags symbol (dict) for the matched -content.""") -def api_content_ctags(q): - """Get information about all `Ctags `_-style - symbols defined in a content object. - - """ - return _api_lookup( - service.lookup_content_ctags, q, - notfound_msg='No ctags symbol found for content {}.'.format(q), - enrich_fn=utils.enrich_metadata_endpoint) - - -@app.route('/api/1/content//raw/') -@doc.route('/api/1/content/raw/') -@doc.arg('q', - default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.param('filename', default=None, - argtype=doc.argtypes.str, - doc='User\'s desired filename. If provided, the downloaded' - ' content will get that filename.') -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.octet_stream, - retdoc='The raw content data as an octet stream') -def api_content_raw(q): - """Get the raw content of a content object (AKA "blob"), as a byte - sequence. - - """ - def generate(content): - yield content['data'] - - content_raw = service.lookup_content_raw(q) - if not content_raw: - raise NotFoundExc('Content %s is not found.' % q) - - content_filetype = service.lookup_content_filetype(q) - if not content_filetype: - raise NotFoundExc('Content %s is not available for download.' 
% q) - - mimetype = content_filetype['mimetype'] - if 'text/' not in mimetype: - raise ForbiddenExc('Only textual content is available for download. ' - 'Actual content mimetype is %s.' % mimetype) - - filename = request.args.get('filename') - if not filename: - filename = 'content_%s_raw' % q.replace(':', '_') - - return app.response_class(generate(content_raw), - headers={'Content-disposition': 'attachment;' - 'filename=%s' % filename}, - mimetype='application/octet-stream') - - -@app.route('/api/1/content//') -@doc.route('/api/1/content/') -@doc.arg('q', - default='adc83b19e793491b1c6ea0fd8b46cd9f32e592fc', - argtype=doc.argtypes.algo_and_hash, - argdoc=_doc_arg_content_id) -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc="""known metadata for content identified by q""") -def api_content_metadata(q): - """Get information about a content (AKA "blob") object. - - """ - return _api_lookup( - service.lookup_content, q, - notfound_msg='Content with {} not found.'.format(q), - enrich_fn=utils.enrich_content) - - -@app.route('/api/1/entity//') -@doc.route('/api/1/entity/', tags=['hidden']) -@doc.arg('uuid', - default='5f4d4c51-498a-4e28-88b3-b3e4e8396cba', - argtype=doc.argtypes.uuid, - argdoc="The entity's uuid identifier") -@doc.raises(exc=doc.excs.badinput, doc=_doc_exc_bad_id) -@doc.raises(exc=doc.excs.notfound, doc=_doc_exc_id_not_found) -@doc.returns(rettype=doc.rettypes.dict, - retdoc='The metadata of the entity identified by uuid') -def api_entity_by_uuid(uuid): - """Return content information if content is found. - - """ - return _api_lookup( - service.lookup_entity_by_uuid, uuid, - notfound_msg="Entity with uuid '%s' not found." 
% uuid, - enrich_fn=utils.enrich_entity) diff --git a/swh/web/ui/views/browse.py b/swh/web/ui/views/browse.py deleted file mode 100644 index d23123da..00000000 --- a/swh/web/ui/views/browse.py +++ /dev/null @@ -1,1015 +0,0 @@ -# Copyright (C) 2015-2017 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import dateutil.parser - -from encodings.aliases import aliases -from flask import render_template, request, url_for, redirect - -from swh.core.utils import grouper -from .. import service, utils -from ..exc import BadInputExc, NotFoundExc -from ..main import app -from . import api - - -def api_lookup(api_fn, query): - """Lookup api with api_fn function with parameter query. - - Example: - filetype = api_lookup('api_content_filetype', 'sha1:blah') - if filetype: - content['mimetype'] = filetype['mimetype'] - """ - try: - return api_fn(query) - except (NotFoundExc, BadInputExc): - return None - - -@app.route('/origin/search/') -def search_origin(): - """ - Redirect request with GET params for an origin to our fragmented URI scheme - - """ - if request.method == 'GET': - data = request.args - origin_id = data.get('origin_id') - if origin_id: - return redirect(url_for('browse_origin', origin_id=origin_id)) - args = ['origin_type', 'origin_url'] - values = {arg: data.get(arg) for arg in args if data.get(arg)} - if 'origin_type' in values and 'origin_url' in values: - return redirect(url_for('browse_origin', **values)) - - -@app.route('/directory/search/') -def search_directory(): - """ - Redirect request with GET params for a directory to our fragmented - URI scheme - - """ - - def url_for_filtered(endpoint, **kwargs): - """Make url_for ignore keyword args that have an empty string for value - """ - filtered = {k: v for k, v in kwargs.items() if kwargs[k]} - return url_for(endpoint, 
**filtered) - - if request.method == 'GET': - data = request.args - sha1_git = data.get('sha1_git') - if sha1_git: - if 'dir_path' in data: - # dir_path exists only in requests for a revision's directory - return redirect(url_for_filtered( - 'browse_revision_directory', - sha1_git=sha1_git, - dir_path=data.get('dir_path') - )) - - return redirect(url_for_filtered( - 'browse_directory', - sha1_git=sha1_git, - path=data.get('path') - )) - - args = ['origin_id', 'branch_name', 'ts', 'path'] - values = {arg: data.get(arg) for arg in args if data.get(arg)} - if 'origin_id' in values: - return redirect(url_for('browse_revision_directory_through_origin', - **values)) - - -@app.route('/revision/search/') -def search_revision(): - """ - Redirect request with GET params for a revision to our fragmented - URI scheme - - """ - if request.method == 'GET': - data = request.args - sha1_git = data.get('sha1_git') - if sha1_git: - return redirect(url_for('browse_revision', sha1_git=sha1_git)) - args = ['origin_id', 'branch_name', 'ts'] - values = {arg: data.get(arg) for arg in args if data.get(arg)} - if 'origin_id' in values: - return redirect(url_for('browse_revision_with_origin', **values)) - - -@app.route('/content/symbol/', methods=['GET']) -def search_symbol(): - """Search for symbols in contents. - - Returns: - dict representing data to look for in swh storage. 
- - """ - env = { - 'result': None, - 'per_page': None, - 'message': '', - 'linknext': None, - 'linkprev': None, - } - - # Read form or get information - data = request.args - q = data.get('q') - per_page = data.get('per_page') - - env['q'] = q - if per_page: - env['per_page'] = per_page - - if q: - try: - result = api.api_content_symbol(q) - if result: - headers = result.get('headers') - result = utils.prepare_data_for_view(result['results']) - env['result'] = result - if headers: - if 'link-next' in headers: - next_last_sha1 = result[-1]['sha1'] - params = { - 'q': q, - 'last_sha1': next_last_sha1, - } - if per_page: - params['per_page'] = per_page - - env['linknext'] = url_for('search_symbol', **params) - - except BadInputExc as e: - env['message'] = str(e) - - return render_template('symbols.html', **env) - - -@app.route('/content/search/', methods=['GET', 'POST']) -def search_content(): - """Search for hashes in swh-storage. - - One form to submit either: - - hash query to look up in swh storage - - file hashes calculated client-side to be queried in swh storage - - both - - Returns: - dict representing data to look for in swh storage. - The following keys are returned: - - search_stats: {'nbfiles': X, 'pct': Y} the number of total - queried files and percentage of files not in storage respectively - - responses: array of {'filename': X, 'sha1': Y, 'found': Z} - - messages: General messages. 
- TODO: - Batch-process with all checksums, not just sha1 - """ - env = {'search_res': None, - 'search_stats': None, - 'message': []} - - search_stats = {'nbfiles': 0, 'pct': 0} - search_res = None - message = '' - - # Get with a single hash request - if request.method == 'POST': - # Post form submission with many hash requests - q = None - else: - data = request.args - q = data.get('q') - - try: - search = api.api_check_content_known(q) - search_res = search['search_res'] - search_stats = search['search_stats'] - except BadInputExc as e: - message = str(e) - - env['search_stats'] = search_stats - env['search_res'] = search_res - env['message'] = message - return render_template('search.html', **env) - - -@app.route('/browse/') -def browse(): - """Render the user-facing browse view - """ - return render_template('browse.html') - - -@app.route('/browse/content//') -def browse_content(q): - """Given a hash and a checksum, display the content's meta-data. - - Args: - q is of the form algo_hash:hash with algo_hash in - (sha1, sha1_git, sha256) - - Returns: - Information on one possible origin for such content. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash - NotFoundExc if the content is not found. - - """ - env = {'q': q, - 'message': None, - 'content': None} - - encoding = request.args.get('encoding', 'utf8') - if encoding not in aliases: - env['message'] = 'Encoding %s not supported.' 
\ - 'Supported Encodings: %s' % ( - encoding, list(aliases.keys())) - return render_template('content.html', **env) - - try: - content = api.api_content_metadata(q) - filetype = api_lookup(api.api_content_filetype, q) - if filetype: - content['mimetype'] = filetype.get('mimetype') - content['encoding'] = filetype.get('encoding') - else: - content['mimetype'] = None - content['encoding'] = None - - language = api_lookup(api.api_content_language, q) - if language: - content['language'] = language.get('lang') - else: - content['language'] = None - - licenses = api_lookup(api.api_content_license, q) - if licenses: - content['licenses'] = ', '.join(licenses.get('licenses', [])) - else: - content['licenses'] = None - - content_raw = service.lookup_content_raw(q) - if content_raw: - content['data'] = content_raw['data'] - else: - content['data'] = None - - ctags = api_lookup(api.api_content_ctags, q) - if ctags: - url = url_for('browse_content', q=q) - content['ctags'] = grouper(( - '%s' % ( - url, - ctag['line'], - ctag['line']) - for ctag in ctags - ), 20) - else: - content['ctags'] = None - - env['content'] = utils.prepare_data_for_view(content, - encoding=encoding) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('content.html', **env) - - -@app.route('/browse/content//raw/') -def browse_content_raw(q): - """Given a hash and a checksum, display the content's raw data. - - Args: - q is of the form algo_hash:hash with algo_hash in - (sha1, sha1_git, sha256) - - Returns: - Information on one possible origin for such content. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash - NotFoundExc if the content is not found. - - """ - return redirect(url_for('api_content_raw', q=q)) - - -def _origin_seen(q, data): - """Given an origin, compute a message string with the right information. 
- - Args: - origin: a dictionary with keys: - - origin: a dictionary with type and url keys - - occurrence: a dictionary with a validity range - - Returns: - Message as a string - - """ - origin_type = data['origin_type'] - origin_url = data['origin_url'] - revision = data['revision'] - branch = data['branch'] - path = data['path'] - - return """The content with hash %s has been seen on origin with type '%s' -at url '%s'. The revision was identified at '%s' on branch '%s'. -The file's path referenced was '%s'.""" % (q, - origin_type, - origin_url, - revision, - branch, - path) - - -# @app.route('/browse/content//origin/') -def browse_content_with_origin(q): - """Show content information. - - Args: - - q: query string of the form with - `algo_hash` in sha1, sha1_git, sha256. - - This means that several different URLs (at least one per - HASH_ALGO) will point to the same content sha: the sha with - 'hash' format - - Returns: - The content's information at for a given checksum. - - """ - env = {'q': q} - - try: - origin = api.api_content_checksum_to_origin(q) - message = _origin_seen(q, origin) - except (NotFoundExc, BadInputExc) as e: - message = str(e) - - env['message'] = message - return render_template('content-with-origin.html', **env) - - -@app.route('/browse/directory//') -@app.route('/browse/directory///') -def browse_directory(sha1_git, path=None): - """Show directory information. - - Args: - - sha1_git: the directory's sha1 git identifier. If path - is set, the base directory for the relative path to the entry - - path: the path to the requested entry, relative to - the directory pointed by sha1_git - - Returns: - The content's information at sha1_git, or at sha1_git/path if - path is set. 
- """ - env = {'sha1_git': sha1_git, - 'files': []} - try: - if path: - env['message'] = ('Listing for directory with path %s from %s:' - % (path, sha1_git)) - dir_or_file = service.lookup_directory_with_path( - sha1_git, path) - if dir_or_file['type'] == 'file': - fsha = 'sha256:%s' % dir_or_file['sha256'] - content = api.api_content_metadata(fsha) - content_raw = service.lookup_content_raw(fsha) - if content_raw: # FIXME: currently assuming utf8 encoding - content['data'] = content_raw['data'] - env['content'] = utils.prepare_data_for_view( - content, encoding='utf-8') - return render_template('content.html', **env) - else: - directory_files = api.api_directory(dir_or_file['target']) - env['files'] = utils.prepare_data_for_view(directory_files) - else: - env['message'] = "Listing for directory %s:" % sha1_git - directory_files = api.api_directory(sha1_git) - env['files'] = utils.prepare_data_for_view(directory_files) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('directory.html', **env) - - -@app.route('/browse/origin//url//') -@app.route('/browse/origin//') -def browse_origin(origin_id=None, origin_type=None, origin_url=None): - """Browse origin matching given criteria - either origin_id or - origin_type and origin_path. 
- - Args: - - origin_id: origin's swh identifier - - origin_type: origin's type - - origin_url: origin's URL - """ - # URLs for the calendar JS plugin - env = {'browse_url': None, - 'visit_url': None, - 'origin': None} - - try: - origin = api.api_origin(origin_id, origin_type, origin_url) - env['origin'] = origin - env['browse_url'] = url_for('browse_revision_with_origin', - origin_id=origin['id']) - env['visit_url'] = url_for('browse_origin_visits', - origin_id=origin['id']) - - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('origin.html', **env) - - -@app.route('/browse/origin//visits/') -def browse_origin_visits(origin_id): - origin = api.api_origin_visits(origin_id)['results'] - for v in origin: - v['date'] = dateutil.parser.parse(v['date']).timestamp() - return origin - - -@app.route('/browse/person//') -def browse_person(person_id): - """Browse person with id id. - - """ - env = {'person_id': person_id, - 'person': None, - 'message': None} - - try: - env['person'] = api.api_person(person_id) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('person.html', **env) - - -@app.route('/browse/release//') -def browse_release(sha1_git): - """Browse release with sha1_git. - - """ - env = {'sha1_git': sha1_git, - 'message': None, - 'release': None} - - try: - rel = api.api_release(sha1_git) - env['release'] = utils.prepare_data_for_view(rel) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('release.html', **env) - - -@app.route('/browse/revision//') -@app.route('/browse/revision//prev//') -def browse_revision(sha1_git, prev_sha1s=None): - """Browse the revision with git SHA1 sha1_git_cur, while optionally keeping - the context from which we came as a list of previous (i.e. later) - revisions' sha1s. - - Args: - sha1_git: the requested revision's sha1_git. 
- prev_sha1s: an optional string of /-separated sha1s representing our - context, ordered by descending revision date. - - Returns: - Information about revision of git SHA1 sha1_git_cur, with relevant URLS - pointing to the context augmented with sha1_git_cur. - - Example: - GET /browse/revision/ - """ - env = {'sha1_git': sha1_git, - 'message': None, - 'revision': None} - - try: - rev = api.api_revision(sha1_git, prev_sha1s) - env['revision'] = utils.prepare_data_for_view(rev) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - return render_template('revision.html', **env) - - -@app.route('/browse/revision//raw/') -def browse_revision_raw_message(sha1_git): - """Given a sha1_git, display the corresponding revision's raw message. - - """ - return redirect(url_for('api_revision_raw_message', sha1_git=sha1_git)) - - -@app.route('/browse/revision//log/') -@app.route('/browse/revision//prev//log/') -def browse_revision_log(sha1_git, prev_sha1s=None): - """Browse revision with sha1_git's log. If the navigation path through the - commit tree is specified, we intersect the earliest revision's log with the - revisions the user browsed through - ie the path taken to the specified - revision. 
- - Args: - sha1_git: the current revision's SHA1_git checksum - prev_sha1s: optionally, the path through which we want log information - """ - env = {'sha1_git': sha1_git, - 'sha1_url': '/browse/revision/%s/' % sha1_git, - 'message': None, - 'revisions': []} - - try: - revision_data = api.api_revision_log(sha1_git, prev_sha1s) - revisions = revision_data['revisions'] - next_revs_url = revision_data['next_revs_url'] - env['revisions'] = map(utils.prepare_data_for_view, revisions) - env['next_revs_url'] = utils.prepare_data_for_view(next_revs_url) - - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('revision-log.html', **env) - - -@app.route('/browse/revision' - '/origin//log/') -@app.route('/browse/revision' - '/origin/' - '/branch//log/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts//log/') -@app.route('/browse/revision' - '/origin/' - '/ts//log/') -def browse_revision_log_by(origin_id, - branch_name='refs/heads/master', - timestamp=None): - """Browse the revision described by origin, branch name and timestamp's - log - - Args: - origin_id: the revision's origin - branch_name: the revision's branch - timestamp: the requested timeframe for the revision - - Returns: - The revision log of the described revision as a list of revisions - if it is found. 
- """ - env = {'sha1_git': None, - 'origin_id': origin_id, - 'origin_url': '/browse/origin/%d/' % origin_id, - 'branch_name': branch_name, - 'timestamp': timestamp, - 'message': None, - 'revisions': []} - - try: - revisions = api.api_revision_log_by( - origin_id, branch_name, timestamp) - env['revisions'] = map(utils.prepare_data_for_view, revisions) - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('revision-log.html', **env) - - -@app.route('/browse/revision//prev//') -def browse_with_rev_context(sha1_git_cur, sha1s): - """Browse the revision with git SHA1 sha1_git_cur, while keeping the context - from which we came as a list of previous (i.e. later) revisions' sha1s. - - Args: - sha1_git_cur: the requested revision's sha1_git. - sha1s: a string of /-separated sha1s representing our context, ordered - by descending revision date. - - Returns: - Information about revision of git SHA1 sha1_git_cur, with relevant URLS - pointing to the context augmented with sha1_git_cur. - - Example: - GET /browse/revision/ - """ - env = {'sha1_git': sha1_git_cur, - 'message': None, - 'revision': None} - - try: - revision = api.api_revision( - sha1_git_cur, sha1s) - env['revision'] = utils.prepare_data_for_view(revision) - except (BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision.html', **env) - - -@app.route('/browse/revision//history//') -def browse_revision_history(sha1_git_root, sha1_git): - """Display information about revision sha1_git, limited to the - sub-graph of all transitive parents of sha1_git_root. - - In other words, sha1_git is an ancestor of sha1_git_root. - - Args: - sha1_git_root: latest revision of the browsed history. - sha1_git: one of sha1_git_root's ancestors. - limit: optional query parameter to limit the revisions log - (default to 100). 
For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - - Returns: - Information on sha1_git if it is an ancestor of sha1_git_root - including children leading to sha1_git_root. - - """ - env = {'sha1_git_root': sha1_git_root, - 'sha1_git': sha1_git, - 'message': None, - 'keys': [], - 'revision': None} - - if sha1_git == sha1_git_root: - return redirect(url_for('browse_revision', - sha1_git=sha1_git)) - - try: - revision = api.api_revision_history(sha1_git_root, - sha1_git) - env['revision'] = utils.prepare_data_for_view(revision) - except (BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision.html', **env) - - -@app.route('/browse/revision//directory/') -@app.route('/browse/revision//directory//') -def browse_revision_directory(sha1_git, dir_path=None): - """Browse directory from revision with sha1_git. - - """ - env = { - 'sha1_git': sha1_git, - 'path': '.' if not dir_path else dir_path, - 'message': None, - 'result': None - } - - encoding = request.args.get('encoding', 'utf8') - if encoding not in aliases: - env['message'] = 'Encoding %s not supported.' 
\ - 'Supported Encodings: %s' % ( - encoding, list(aliases.keys())) - return render_template('revision-directory.html', **env) - - try: - result = api.api_revision_directory(sha1_git, dir_path, with_data=True) - result['content'] = utils.prepare_data_for_view(result['content'], - encoding=encoding) - env['revision'] = result['revision'] - env['result'] = result - except (BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision-directory.html', **env) - - -@app.route('/browse/revision/' - '/history/' - '/directory/') -@app.route('/browse/revision/' - '/history/' - '/directory//') -def browse_revision_history_directory(sha1_git_root, sha1_git, path=None): - """Return information about directory pointed to by the revision - defined as: revision sha1_git, limited to the sub-graph of all - transitive parents of sha1_git_root. - - Args: - sha1_git_root: latest revision of the browsed history. - sha1_git: one of sha1_git_root's ancestors. - path: optional directory pointed to by that revision. - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - - Returns: - Information on the directory pointed to by that revision. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if either revision is not found or if sha1_git is not an - ancestor of sha1_git_root or the path referenced does not exist - - """ - env = { - 'sha1_git_root': sha1_git_root, - 'sha1_git': sha1_git, - 'path': '.' if not path else path, - 'message': None, - 'result': None - } - - encoding = request.args.get('encoding', 'utf8') - if encoding not in aliases: - env['message'] = 'Encoding %s not supported.' 
\ - 'Supported Encodings: %s' % ( - encoding, list(aliases.keys())) - return render_template('revision-directory.html', **env) - - if sha1_git == sha1_git_root: - return redirect(url_for('browse_revision_directory', - sha1_git=sha1_git, - path=path, - encoding=encoding), - code=301) - - try: - result = api.api_revision_history_directory(sha1_git_root, - sha1_git, - path, - with_data=True) - env['revision'] = result['revision'] - env['content'] = utils.prepare_data_for_view(result['content'], - encoding=encoding) - env['result'] = result - except (BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision-directory.html', **env) - - -@app.route('/browse/revision' - '/origin/' - '/history/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/history/' - '/directory//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/history/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/history/' - '/directory//') -@app.route('/browse/revision' - '/origin/' - '/ts/' - '/history/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/ts/' - '/history/' - '/directory//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts/' - '/history/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts/' - '/history/' - '/directory//') -def browse_directory_through_revision_with_origin_history( - origin_id, - branch_name="refs/heads/master", - ts=None, - sha1_git=None, - path=None): - env = { - 'origin_id': origin_id, - 'branch_name': branch_name, - 'ts': ts, - 'sha1_git': sha1_git, - 'path': '.' if not path else path, - 'message': None, - 'result': None - } - - encoding = request.args.get('encoding', 'utf8') - if encoding not in aliases: - env['message'] = (('Encoding %s not supported.' 
- 'Supported Encodings: %s') % ( - encoding, list(aliases.keys()))) - return render_template('revision-directory.html', **env) - - try: - result = api.api_directory_through_revision_with_origin_history( - origin_id, branch_name, ts, sha1_git, path, with_data=True) - env['revision'] = result['revision'] - env['content'] = utils.prepare_data_for_view(result['content'], - encoding=encoding) - env['result'] = result - except (BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision-directory.html', **env) - - -@app.route('/browse/revision' - '/origin/') -@app.route('/browse/revision' - '/origin//') -@app.route('/browse/revision' - '/origin/' - '/branch//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts//') -@app.route('/browse/revision' - '/origin/' - '/ts//') -def browse_revision_with_origin(origin_id, - branch_name="refs/heads/master", - ts=None): - """Instead of having to specify a (root) revision by SHA1_GIT, users - might want to specify a place and a time. In SWH a "place" is an - origin; a "time" is a timestamp at which some place has been - observed by SWH crawlers. - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - - Returns: - Information on the revision if found. - - Raises: - BadInputExc in case of unknown algo_hash or bad hash. - NotFoundExc if the revision is not found. 
- - """ - env = {'message': None, - 'revision': None} - try: - revision = api.api_revision_with_origin(origin_id, - branch_name, - ts) - env['revision'] = utils.prepare_data_for_view(revision) - except (ValueError, NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('revision.html', **env) - - -@app.route('/browse/revision' - '/origin/' - '/history//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/history//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts/' - '/history//') -def browse_revision_history_through_origin(origin_id, - branch_name='refs/heads/master', - ts=None, - sha1_git=None): - """Return information about revision sha1_git, limited to the - sub-graph of all transitive parents of the revision root identified - by (origin_id, branch_name, ts). - Given sha1_git_root such root revision's identifier, in other words, - sha1_git is an ancestor of sha1_git_root. - - Args: - origin_id: origin's identifier (default to 1). - branch_name: the optional branch for the given origin (default - to master). - timestamp: optional timestamp (default to the nearest time - crawl of timestamp). - sha1_git: one of sha1_git_root's ancestors. - limit: optional query parameter to limit the revisions log - (default to 100). For now, note that this limit could impede the - transitivity conclusion about sha1_git not being an ancestor of - sha1_git_root (even if it is). - - Returns: - Information on sha1_git if it is an ancestor of sha1_git_root - including children leading to sha1_git_root. 
- - """ - env = {'message': None, - 'revision': None} - try: - revision = api.api_revision_history_through_origin( - origin_id, - branch_name, - ts, - sha1_git) - env['revision'] = utils.prepare_data_for_view(revision) - except (ValueError, BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision.html', **env) - - -@app.route('/browse/revision' - '/origin/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/directory//') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts/' - '/directory/') -@app.route('/browse/revision' - '/origin/' - '/branch/' - '/ts/' - '/directory//') -def browse_revision_directory_through_origin(origin_id, - branch_name='refs/heads/master', - ts=None, - path=None): - - env = {'message': None, - 'origin_id': origin_id, - 'ts': ts, - 'path': '.' if not path else path, - 'result': None} - - encoding = request.args.get('encoding', 'utf8') - if encoding not in aliases: - env['message'] = 'Encoding %s not supported.' 
\ - 'Supported Encodings: %s' % ( - encoding, list(aliases.keys())) - return render_template('revision-directory.html', **env) - - try: - result = api.api_directory_through_revision_origin( - origin_id, - branch_name, - ts, - path, - with_data=True) - - result['content'] = utils.prepare_data_for_view(result['content'], - encoding=encoding) - env['revision'] = result['revision'] - env['result'] = result - except (ValueError, BadInputExc, NotFoundExc) as e: - env['message'] = str(e) - - return render_template('revision-directory.html', **env) - - -@app.route('/browse/entity/') -@app.route('/browse/entity//') -def browse_entity(uuid): - env = {'entities': [], - 'message': None} - - try: - entities = api.api_entity_by_uuid(uuid) - env['entities'] = entities - except (NotFoundExc, BadInputExc) as e: - env['message'] = str(e) - - return render_template('entity.html', **env) diff --git a/swh/web/ui/views/errorhandler.py b/swh/web/ui/views/errorhandler.py deleted file mode 100644 index dc35d3d6..00000000 --- a/swh/web/ui/views/errorhandler.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (C) 2015-2017 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -from swh.storage.exc import StorageDBError, StorageAPIError - -from .. import renderers -from ..exc import NotFoundExc, ForbiddenExc -from ..main import app - - -@app.errorhandler(ValueError) -def value_error_as_bad_request(error): - """Compute a bad request response and add body as payload. - - """ - return renderers.error_response(400, error) - - -@app.errorhandler(NotFoundExc) -def value_not_found(error): - """Compute a not found response and add body as payload. - - """ - return renderers.error_response(404, error) - - -@app.errorhandler(ForbiddenExc) -def value_forbidden(error): - """Compute a forbidden response and add body as payload. 
- - """ - return renderers.error_response(403, error) - - -@app.errorhandler(StorageDBError) -@app.errorhandler(StorageAPIError) -def backend_problem(error): - """Compute a not found and add body as payload. - - """ - return renderers.error_response(503, error) diff --git a/swh/web/ui/views/main.py b/swh/web/ui/views/main.py deleted file mode 100644 index 230b8e5f..00000000 --- a/swh/web/ui/views/main.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2016 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU Affero General Public License version 3, or any later version -# See top-level LICENSE file for more information - -import flask - -from ..main import app - - -@app.route('/') -def homepage(): - """Home page - - """ - return flask.redirect(flask.url_for('api_doc')) - - -# @app.route('/about/') -# def about(): -# return flask.render_template('about.html') diff --git a/swh/web/urls.py b/swh/web/urls.py new file mode 100644 index 00000000..668fdb01 --- /dev/null +++ b/swh/web/urls.py @@ -0,0 +1,25 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +"""swhweb URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/1.11/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.conf.urls import url, include + 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) +""" +from django.conf.urls import url, include + +urlpatterns = [ + url(r'^api/', include('swh.web.api.urls')), +] diff --git a/swh/web/wsgi.py b/swh/web/wsgi.py new file mode 100644 index 00000000..14abb39c --- /dev/null +++ b/swh/web/wsgi.py @@ -0,0 +1,21 @@ +# Copyright (C) 2017 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +""" +WSGI config for swhweb project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "swh.web.settings") + +application = get_wsgi_application()